From 42920004671c65edb7b14489401dcb73475bb306 Mon Sep 17 00:00:00 2001 From: Jose Manuel Heredia Hidalgo Date: Thu, 1 Apr 2021 23:10:40 +0000 Subject: [PATCH 1/6] Updates for Latest Swagger Changes --- .../review/synapse-artifacts.api.md | 752 +++- .../synapse-artifacts/src/artifactsClient.ts | 55 +- .../src/artifactsClientContext.ts | 16 +- sdk/synapse/synapse-artifacts/src/index.ts | 2 +- .../src/lro/azureAsyncOperationStrategy.ts | 42 +- .../src/lro/bodyPollingStrategy.ts | 5 +- .../src/lro/locationStrategy.ts | 7 +- .../synapse-artifacts/src/lro/lroPolicy.ts | 8 +- .../synapse-artifacts/src/lro/lroPoller.ts | 27 +- .../synapse-artifacts/src/lro/models.ts | 8 +- .../synapse-artifacts/src/lro/operation.ts | 4 +- .../synapse-artifacts/src/lro/requestUtils.ts | 16 +- .../synapse-artifacts/src/models/index.ts | 1303 ++++++- .../synapse-artifacts/src/models/mappers.ts | 3197 +++++++++++++++-- .../src/operations/bigDataPools.ts | 37 +- .../src/operations/dataFlow.ts | 50 +- .../src/operations/dataFlowDebugSession.ts | 58 +- .../src/operations/dataset.ts | 53 +- .../src/operations/integrationRuntimes.ts | 40 +- .../src/operations/library.ts | 101 +- .../src/operations/linkedService.ts | 51 +- .../src/operations/notebook.ts | 71 +- .../src/operations/pipeline.ts | 59 +- .../src/operations/pipelineRun.ts | 43 +- .../src/operations/sparkJobDefinition.ts | 56 +- .../src/operations/sqlPools.ts | 35 +- .../src/operations/sqlScript.ts | 52 +- .../src/operations/trigger.ts | 69 +- .../src/operations/triggerRun.ts | 45 +- .../src/operations/workspace.ts | 22 +- .../operations/workspaceGitRepoManagement.ts | 20 +- .../src/operationsInterfaces/bigDataPools.ts | 28 + .../src/operationsInterfaces/dataFlow.ts | 72 + .../dataFlowDebugSession.ts | 70 + .../src/operationsInterfaces/dataset.ts | 72 + .../src/operationsInterfaces/index.ts | 25 + .../integrationRuntimes.ts | 33 + .../src/operationsInterfaces/library.ts | 93 + .../src/operationsInterfaces/linkedService.ts | 72 + 
.../src/operationsInterfaces/notebook.ts | 79 + .../src/operationsInterfaces/pipeline.ts | 83 + .../src/operationsInterfaces/pipelineRun.ts | 60 + .../sparkJobDefinition.ts | 94 + .../src/operationsInterfaces/sqlPools.ts | 28 + .../src/operationsInterfaces/sqlScript.ts | 72 + .../src/operationsInterfaces/trigger.ts | 108 + .../src/operationsInterfaces/triggerRun.ts | 48 + .../src/operationsInterfaces/workspace.ts | 19 + .../workspaceGitRepoManagement.ts | 26 + sdk/synapse/synapse-artifacts/src/tracing.ts | 13 +- .../synapse-artifacts/swagger/README.md | 2 +- 51 files changed, 6664 insertions(+), 737 deletions(-) create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/bigDataPools.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlow.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowDebugSession.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataset.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/index.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/integrationRuntimes.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/linkedService.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebook.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipeline.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineRun.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinition.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlPools.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlScript.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/trigger.ts create mode 100644 
sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRun.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspace.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspaceGitRepoManagement.ts diff --git a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md index d0ad61c88a5b..7f3495a8d7e9 100644 --- a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md +++ b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md @@ -73,6 +73,12 @@ export interface AddDataFlowToDebugSessionResponse { jobVersion?: string; } +// @public +export interface AdditionalColumns { + name?: any; + value?: any; +} + // @public export type AmazonMWSLinkedService = LinkedService & { type: "AmazonMWS"; @@ -126,12 +132,27 @@ export type AmazonRedshiftTableDataset = Dataset & { schemaTypePropertiesSchema?: any; }; +// @public +export type AmazonS3Dataset = Dataset & { + type: "AmazonS3Object"; + bucketName: any; + key?: any; + prefix?: any; + version?: any; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; + format?: DatasetStorageFormatUnion; + compression?: DatasetCompressionUnion; +}; + // @public export type AmazonS3LinkedService = LinkedService & { type: "AmazonS3"; + authenticationType?: any; accessKeyId?: any; secretAccessKey?: SecretBaseUnion; serviceUrl?: any; + sessionToken?: SecretBaseUnion; encryptedCredential?: any; }; @@ -149,7 +170,10 @@ export type AmazonS3ReadSettings = StoreReadSettings & { wildcardFolderPath?: any; wildcardFileName?: any; prefix?: any; + fileListPath?: any; enablePartitionDiscovery?: boolean; + partitionRootPath?: any; + deleteFilesAfterCompletion?: any; modifiedDatetimeStart?: any; modifiedDatetimeEnd?: any; }; @@ -240,7 +264,7 @@ export type AvroCompressionCodec = string; export type AvroDataset = Dataset & { type: "Avro"; location?: DatasetLocationUnion; - avroCompressionCodec?: 
AvroCompressionCodec; + avroCompressionCodec?: any; avroCompressionLevel?: number; }; @@ -260,6 +284,7 @@ export type AvroSink = CopySink & { export type AvroSource = CopySource & { type: "AvroSource"; storeSettings?: StoreReadSettingsUnion; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -267,6 +292,8 @@ export type AvroWriteSettings = FormatWriteSettings & { type: "AvroWriteSettings"; recordName?: string; recordNamespace?: string; + maxRowsPerFile?: any; + fileNamePrefix?: any; }; // @public @@ -280,6 +307,27 @@ export type AzureBatchLinkedService = LinkedService & { encryptedCredential?: any; }; +// @public +export type AzureBlobDataset = Dataset & { + type: "AzureBlob"; + folderPath?: any; + tableRootLocation?: any; + fileName?: any; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; + format?: DatasetStorageFormatUnion; + compression?: DatasetCompressionUnion; +}; + +// @public +export type AzureBlobFSDataset = Dataset & { + type: "AzureBlobFSFile"; + folderPath?: any; + fileName?: any; + format?: DatasetStorageFormatUnion; + compression?: DatasetCompressionUnion; +}; + // @public export type AzureBlobFSLinkedService = LinkedService & { type: "AzureBlobFS"; @@ -288,6 +336,7 @@ export type AzureBlobFSLinkedService = LinkedService & { servicePrincipalId?: any; servicePrincipalKey?: SecretBaseUnion; tenant?: any; + azureCloudType?: any; encryptedCredential?: any; }; @@ -303,7 +352,10 @@ export type AzureBlobFSReadSettings = StoreReadSettings & { recursive?: any; wildcardFolderPath?: any; wildcardFileName?: any; + fileListPath?: any; enablePartitionDiscovery?: boolean; + partitionRootPath?: any; + deleteFilesAfterCompletion?: any; modifiedDatetimeStart?: any; modifiedDatetimeEnd?: any; }; @@ -339,6 +391,7 @@ export type AzureBlobStorageLinkedService = LinkedService & { servicePrincipalId?: any; servicePrincipalKey?: SecretBaseUnion; tenant?: any; + azureCloudType?: any; encryptedCredential?: string; }; @@ -355,7 +408,10 @@ export type 
AzureBlobStorageReadSettings = StoreReadSettings & { wildcardFolderPath?: any; wildcardFileName?: any; prefix?: any; + fileListPath?: any; enablePartitionDiscovery?: boolean; + partitionRootPath?: any; + deleteFilesAfterCompletion?: any; modifiedDatetimeStart?: any; modifiedDatetimeEnd?: any; }; @@ -366,11 +422,57 @@ export type AzureBlobStorageWriteSettings = StoreWriteSettings & { blockSizeInMB?: any; }; +// @public +export type AzureDatabricksDeltaLakeDataset = Dataset & { + type: "AzureDatabricksDeltaLakeDataset"; + table?: any; + database?: any; +}; + +// @public +export type AzureDatabricksDeltaLakeExportCommand = ExportSettings & { + type: "AzureDatabricksDeltaLakeExportCommand"; + dateFormat?: any; + timestampFormat?: any; +}; + +// @public +export type AzureDatabricksDeltaLakeImportCommand = ImportSettings & { + type: "AzureDatabricksDeltaLakeImportCommand"; + dateFormat?: any; + timestampFormat?: any; +}; + +// @public +export type AzureDatabricksDeltaLakeLinkedService = LinkedService & { + type: "AzureDatabricksDeltaLake"; + domain: any; + accessToken: SecretBaseUnion; + clusterId?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureDatabricksDeltaLakeSink = CopySink & { + type: "AzureDatabricksDeltaLakeSink"; + preCopyScript?: any; + importSettings?: AzureDatabricksDeltaLakeImportCommand; +}; + +// @public +export type AzureDatabricksDeltaLakeSource = CopySource & { + type: "AzureDatabricksDeltaLakeSource"; + query?: any; + exportSettings?: AzureDatabricksDeltaLakeExportCommand; +}; + // @public export type AzureDatabricksLinkedService = LinkedService & { type: "AzureDatabricks"; domain: any; - accessToken: SecretBaseUnion; + accessToken?: SecretBaseUnion; + authentication?: any; + workspaceResourceId?: any; existingClusterId?: any; instancePoolId?: any; newClusterVersion?: any; @@ -385,10 +487,12 @@ export type AzureDatabricksLinkedService = LinkedService & { newClusterCustomTags?: { [propertyName: string]: any; }; + 
newClusterLogDestination?: any; newClusterDriverNodeType?: any; newClusterInitScripts?: any; newClusterEnableElasticDisk?: any; encryptedCredential?: any; + policyId?: any; }; // @public @@ -422,6 +526,7 @@ export type AzureDataExplorerSource = CopySource & { query: any; noTruncation?: any; queryTimeout?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -443,6 +548,15 @@ export type AzureDataLakeAnalyticsLinkedService = LinkedService & { encryptedCredential?: any; }; +// @public +export type AzureDataLakeStoreDataset = Dataset & { + type: "AzureDataLakeStoreFile"; + folderPath?: any; + fileName?: any; + format?: DatasetStorageFormatUnion; + compression?: DatasetCompressionUnion; +}; + // @public export type AzureDataLakeStoreLinkedService = LinkedService & { type: "AzureDataLakeStore"; @@ -450,6 +564,7 @@ export type AzureDataLakeStoreLinkedService = LinkedService & { servicePrincipalId?: any; servicePrincipalKey?: SecretBaseUnion; tenant?: any; + azureCloudType?: any; accountName?: any; subscriptionId?: any; resourceGroupName?: any; @@ -467,7 +582,12 @@ export type AzureDataLakeStoreReadSettings = StoreReadSettings & { recursive?: any; wildcardFolderPath?: any; wildcardFileName?: any; + fileListPath?: any; + listAfter?: any; + listBefore?: any; enablePartitionDiscovery?: boolean; + partitionRootPath?: any; + deleteFilesAfterCompletion?: any; modifiedDatetimeStart?: any; modifiedDatetimeEnd?: any; }; @@ -488,6 +608,7 @@ export type AzureDataLakeStoreSource = CopySource & { // @public export type AzureDataLakeStoreWriteSettings = StoreWriteSettings & { type: "AzureDataLakeStoreWriteSettings"; + expiryDateTime?: any; }; // @public @@ -501,6 +622,12 @@ export type AzureFileStorageLinkedService = LinkedService & { host: any; userId?: any; password?: SecretBaseUnion; + connectionString?: any; + accountKey?: AzureKeyVaultSecretReference; + sasUri?: any; + sasToken?: AzureKeyVaultSecretReference; + fileShare?: any; + snapshot?: any; encryptedCredential?: 
any; }; @@ -515,11 +642,20 @@ export type AzureFileStorageReadSettings = StoreReadSettings & { recursive?: any; wildcardFolderPath?: any; wildcardFileName?: any; + prefix?: any; + fileListPath?: any; enablePartitionDiscovery?: boolean; + partitionRootPath?: any; + deleteFilesAfterCompletion?: any; modifiedDatetimeStart?: any; modifiedDatetimeEnd?: any; }; +// @public +export type AzureFileStorageWriteSettings = StoreWriteSettings & { + type: "AzureFileStorageWriteSettings"; +}; + // @public export type AzureFunctionActivity = ExecutionActivity & { type: "AzureFunctionActivity"; @@ -727,6 +863,7 @@ export type AzureSqlDatabaseLinkedService = LinkedService & { servicePrincipalId?: any; servicePrincipalKey?: SecretBaseUnion; tenant?: any; + azureCloudType?: any; encryptedCredential?: any; }; @@ -738,6 +875,7 @@ export type AzureSqlDWLinkedService = LinkedService & { servicePrincipalId?: any; servicePrincipalKey?: SecretBaseUnion; tenant?: any; + azureCloudType?: any; encryptedCredential?: any; }; @@ -757,6 +895,7 @@ export type AzureSqlMILinkedService = LinkedService & { servicePrincipalId?: any; servicePrincipalKey?: SecretBaseUnion; tenant?: any; + azureCloudType?: any; encryptedCredential?: any; }; @@ -790,6 +929,8 @@ export type AzureSqlSource = TabularSource & { [propertyName: string]: StoredProcedureParameter; }; produceAdditionalTypes?: any; + partitionOption?: any; + partitionSettings?: SqlPartitionSettings; }; // @public @@ -901,8 +1042,7 @@ export type BigDataPoolsListResponse = BigDataPoolResourceInfoListResult & { }; // @public -export class BigDataPoolsOperation { - constructor(client: ArtifactsClientContext); +export interface BigDataPoolsOperation { get(bigDataPoolName: string, options?: coreHttp.OperationOptions): Promise; list(options?: coreHttp.OperationOptions): Promise; } @@ -914,6 +1054,12 @@ export type BinaryDataset = Dataset & { compression?: DatasetCompressionUnion; }; +// @public +export type BinaryReadSettings = FormatReadSettings & { + 
type: "BinaryReadSettings"; + compressionProperties?: CompressionReadSettingsUnion; +}; + // @public export type BinarySink = CopySink & { type: "BinarySink"; @@ -924,6 +1070,7 @@ export type BinarySink = CopySink & { export type BinarySource = CopySource & { type: "BinarySource"; storeSettings?: StoreReadSettingsUnion; + formatSettings?: BinaryReadSettings; }; // @public @@ -1029,7 +1176,7 @@ export type CommonDataServiceForAppsLinkedService = LinkedService & { username?: any; password?: SecretBaseUnion; servicePrincipalId?: any; - servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + servicePrincipalCredentialType?: any; servicePrincipalCredential?: SecretBaseUnion; encryptedCredential?: any; }; @@ -1046,11 +1193,25 @@ export type CommonDataServiceForAppsSink = CopySink & { export type CommonDataServiceForAppsSource = CopySource & { type: "CommonDataServiceForAppsSource"; query?: any; + additionalColumns?: AdditionalColumns[]; }; +// @public +export type CompressionCodec = string; + +// @public +export interface CompressionReadSettings { + [property: string]: any; + type: "ZipDeflateReadSettings" | "TarReadSettings" | "TarGZipReadSettings"; +} + +// @public (undocumented) +export type CompressionReadSettingsUnion = CompressionReadSettings | ZipDeflateReadSettings | TarReadSettings | TarGZipReadSettings; + // @public export type ConcurLinkedService = LinkedService & { type: "Concur"; + connectionProperties?: any; clientId: any; username: any; password?: SecretBaseUnion; @@ -1094,10 +1255,20 @@ export type CopyActivity = ExecutionActivity & { dataIntegrationUnits?: any; enableSkipIncompatibleRow?: any; redirectIncompatibleRowSettings?: RedirectIncompatibleRowSettings; + logStorageSettings?: LogStorageSettings; + logSettings?: LogSettings; preserveRules?: any[]; preserve?: any[]; + validateDataConsistency?: any; + skipErrorFile?: SkipErrorFile; }; +// @public +export interface CopyActivityLogSettings { + enableReliableLogging?: any; + logLevel?: 
any; +} + // @public export type CopyBehaviorType = string; @@ -1107,13 +1278,13 @@ export interface CopySink { maxConcurrentConnections?: any; sinkRetryCount?: any; sinkRetryWait?: any; - type: "DelimitedTextSink" | "JsonSink" | "OrcSink" | "AzurePostgreSqlSink" | "AzureMySqlSink" | "SapCloudForCustomerSink" | "AzureQueueSink" | "AzureTableSink" | "AvroSink" | "ParquetSink" | "BinarySink" | "BlobSink" | "FileSystemSink" | "DocumentDbCollectionSink" | "CosmosDbSqlApiSink" | "SqlSink" | "SqlServerSink" | "AzureSqlSink" | "SqlMISink" | "SqlDWSink" | "OracleSink" | "AzureDataLakeStoreSink" | "AzureBlobFSSink" | "AzureSearchIndexSink" | "OdbcSink" | "InformixSink" | "MicrosoftAccessSink" | "DynamicsSink" | "DynamicsCrmSink" | "CommonDataServiceForAppsSink" | "AzureDataExplorerSink" | "SalesforceSink" | "SalesforceServiceCloudSink" | "CosmosDbMongoDbApiSink"; + type: "DelimitedTextSink" | "JsonSink" | "OrcSink" | "RestSink" | "AzurePostgreSqlSink" | "AzureMySqlSink" | "AzureDatabricksDeltaLakeSink" | "SapCloudForCustomerSink" | "AzureQueueSink" | "AzureTableSink" | "AvroSink" | "ParquetSink" | "BinarySink" | "BlobSink" | "FileSystemSink" | "DocumentDbCollectionSink" | "CosmosDbSqlApiSink" | "SqlSink" | "SqlServerSink" | "AzureSqlSink" | "SqlMISink" | "SqlDWSink" | "SnowflakeSink" | "OracleSink" | "AzureDataLakeStoreSink" | "AzureBlobFSSink" | "AzureSearchIndexSink" | "OdbcSink" | "InformixSink" | "MicrosoftAccessSink" | "DynamicsSink" | "DynamicsCrmSink" | "CommonDataServiceForAppsSink" | "AzureDataExplorerSink" | "SalesforceSink" | "SalesforceServiceCloudSink" | "CosmosDbMongoDbApiSink"; writeBatchSize?: any; writeBatchTimeout?: any; } // @public (undocumented) -export type CopySinkUnion = CopySink | DelimitedTextSink | JsonSink | OrcSink | AzurePostgreSqlSink | AzureMySqlSink | SapCloudForCustomerSink | AzureQueueSink | AzureTableSink | AvroSink | ParquetSink | BinarySink | BlobSink | FileSystemSink | DocumentDbCollectionSink | CosmosDbSqlApiSink | SqlSink | 
SqlServerSink | AzureSqlSink | SqlMISink | SqlDWSink | OracleSink | AzureDataLakeStoreSink | AzureBlobFSSink | AzureSearchIndexSink | OdbcSink | InformixSink | MicrosoftAccessSink | DynamicsSink | DynamicsCrmSink | CommonDataServiceForAppsSink | AzureDataExplorerSink | SalesforceSink | SalesforceServiceCloudSink | CosmosDbMongoDbApiSink; +export type CopySinkUnion = CopySink | DelimitedTextSink | JsonSink | OrcSink | RestSink | AzurePostgreSqlSink | AzureMySqlSink | AzureDatabricksDeltaLakeSink | SapCloudForCustomerSink | AzureQueueSink | AzureTableSink | AvroSink | ParquetSink | BinarySink | BlobSink | FileSystemSink | DocumentDbCollectionSink | CosmosDbSqlApiSink | SqlSink | SqlServerSink | AzureSqlSink | SqlMISink | SqlDWSink | SnowflakeSink | OracleSink | AzureDataLakeStoreSink | AzureBlobFSSink | AzureSearchIndexSink | OdbcSink | InformixSink | MicrosoftAccessSink | DynamicsSink | DynamicsCrmSink | CommonDataServiceForAppsSink | AzureDataExplorerSink | SalesforceSink | SalesforceServiceCloudSink | CosmosDbMongoDbApiSink; // @public export interface CopySource { @@ -1121,11 +1292,11 @@ export interface CopySource { maxConcurrentConnections?: any; sourceRetryCount?: any; sourceRetryWait?: any; - type: "AvroSource" | "ParquetSource" | "DelimitedTextSource" | "JsonSource" | "OrcSource" | "BinarySource" | "TabularSource" | "AzureTableSource" | "BlobSource" | "DocumentDbCollectionSource" | "CosmosDbSqlApiSource" | "DynamicsSource" | "DynamicsCrmSource" | "CommonDataServiceForAppsSource" | "RelationalSource" | "InformixSource" | "MicrosoftAccessSource" | "Db2Source" | "OdbcSource" | "MySqlSource" | "PostgreSqlSource" | "SybaseSource" | "SapBwSource" | "ODataSource" | "SalesforceSource" | "SalesforceServiceCloudSource" | "SapCloudForCustomerSource" | "SapEccSource" | "SapHanaSource" | "SapOpenHubSource" | "SapTableSource" | "RestSource" | "SqlSource" | "SqlServerSource" | "AzureSqlSource" | "SqlMISource" | "SqlDWSource" | "FileSystemSource" | "HdfsSource" | 
"AzureMySqlSource" | "AzureDataExplorerSource" | "OracleSource" | "TeradataSource" | "WebSource" | "CassandraSource" | "MongoDbSource" | "MongoDbV2Source" | "CosmosDbMongoDbApiSource" | "Office365Source" | "AzureDataLakeStoreSource" | "AzureBlobFSSource" | "HttpSource" | "AmazonMWSSource" | "AzurePostgreSqlSource" | "ConcurSource" | "CouchbaseSource" | "DrillSource" | "EloquaSource" | "GoogleBigQuerySource" | "GreenplumSource" | "HBaseSource" | "HiveSource" | "HubspotSource" | "ImpalaSource" | "JiraSource" | "MagentoSource" | "MariaDBSource" | "AzureMariaDBSource" | "MarketoSource" | "PaypalSource" | "PhoenixSource" | "PrestoSource" | "QuickBooksSource" | "ServiceNowSource" | "ShopifySource" | "SparkSource" | "SquareSource" | "XeroSource" | "ZohoSource" | "NetezzaSource" | "VerticaSource" | "SalesforceMarketingCloudSource" | "ResponsysSource" | "DynamicsAXSource" | "OracleServiceCloudSource" | "GoogleAdWordsSource" | "AmazonRedshiftSource"; + type: "AvroSource" | "ExcelSource" | "ParquetSource" | "DelimitedTextSource" | "JsonSource" | "XmlSource" | "OrcSource" | "BinarySource" | "TabularSource" | "AzureTableSource" | "BlobSource" | "DocumentDbCollectionSource" | "CosmosDbSqlApiSource" | "DynamicsSource" | "DynamicsCrmSource" | "CommonDataServiceForAppsSource" | "RelationalSource" | "InformixSource" | "MicrosoftAccessSource" | "Db2Source" | "OdbcSource" | "MySqlSource" | "PostgreSqlSource" | "SybaseSource" | "SapBwSource" | "ODataSource" | "SalesforceSource" | "SalesforceServiceCloudSource" | "SapCloudForCustomerSource" | "SapEccSource" | "SapHanaSource" | "SapOpenHubSource" | "SapTableSource" | "RestSource" | "SqlSource" | "SqlServerSource" | "AzureSqlSource" | "SqlMISource" | "SqlDWSource" | "FileSystemSource" | "HdfsSource" | "AzureMySqlSource" | "AzureDataExplorerSource" | "OracleSource" | "TeradataSource" | "WebSource" | "CassandraSource" | "MongoDbSource" | "MongoDbAtlasSource" | "MongoDbV2Source" | "CosmosDbMongoDbApiSource" | "Office365Source" | 
"AzureDataLakeStoreSource" | "AzureBlobFSSource" | "HttpSource" | "AmazonMWSSource" | "AzurePostgreSqlSource" | "ConcurSource" | "CouchbaseSource" | "DrillSource" | "EloquaSource" | "GoogleBigQuerySource" | "GreenplumSource" | "HBaseSource" | "HiveSource" | "HubspotSource" | "ImpalaSource" | "JiraSource" | "MagentoSource" | "MariaDBSource" | "AzureMariaDBSource" | "MarketoSource" | "PaypalSource" | "PhoenixSource" | "PrestoSource" | "QuickBooksSource" | "ServiceNowSource" | "ShopifySource" | "SparkSource" | "SquareSource" | "XeroSource" | "ZohoSource" | "NetezzaSource" | "VerticaSource" | "SalesforceMarketingCloudSource" | "ResponsysSource" | "DynamicsAXSource" | "OracleServiceCloudSource" | "GoogleAdWordsSource" | "AmazonRedshiftSource" | "SnowflakeSource" | "AzureDatabricksDeltaLakeSource" | "SharePointOnlineListSource"; } // @public (undocumented) -export type CopySourceUnion = CopySource | AvroSource | ParquetSource | DelimitedTextSource | JsonSource | OrcSource | BinarySource | TabularSourceUnion | BlobSource | DocumentDbCollectionSource | CosmosDbSqlApiSource | DynamicsSource | DynamicsCrmSource | CommonDataServiceForAppsSource | RelationalSource | MicrosoftAccessSource | ODataSource | SalesforceServiceCloudSource | RestSource | FileSystemSource | HdfsSource | AzureDataExplorerSource | OracleSource | WebSource | MongoDbSource | MongoDbV2Source | CosmosDbMongoDbApiSource | Office365Source | AzureDataLakeStoreSource | AzureBlobFSSource | HttpSource; +export type CopySourceUnion = CopySource | AvroSource | ExcelSource | ParquetSource | DelimitedTextSource | JsonSource | XmlSource | OrcSource | BinarySource | TabularSourceUnion | BlobSource | DocumentDbCollectionSource | CosmosDbSqlApiSource | DynamicsSource | DynamicsCrmSource | CommonDataServiceForAppsSource | RelationalSource | MicrosoftAccessSource | ODataSource | SalesforceServiceCloudSource | RestSource | FileSystemSource | HdfsSource | AzureDataExplorerSource | OracleSource | WebSource | MongoDbSource | 
MongoDbAtlasSource | MongoDbV2Source | CosmosDbMongoDbApiSource | Office365Source | AzureDataLakeStoreSource | AzureBlobFSSource | HttpSource | SnowflakeSource | AzureDatabricksDeltaLakeSource | SharePointOnlineListSource; // @public export interface CopyTranslator { @@ -1172,6 +1343,7 @@ export type CosmosDbMongoDbApiSource = CopySource & { cursorMethods?: MongoDbCursorMethodsProperties; batchSize?: any; queryTimeout?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -1192,6 +1364,8 @@ export type CosmosDbSqlApiSource = CopySource & { query?: any; pageSize?: any; preferredRegions?: any; + detectDatetime?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -1245,6 +1419,7 @@ export type CustomActivity = ExecutionActivity & { [propertyName: string]: any; }; retentionTimeInDays?: any; + autoUserSpecification?: any; }; // @public @@ -1271,6 +1446,15 @@ export interface CustomerManagedKeyDetails { readonly status?: string; } +// @public +export type CustomEventsTrigger = MultiplePipelineTrigger & { + type: "CustomEventsTrigger"; + subjectBeginsWith?: string; + subjectEndsWith?: string; + events: any[]; + scope: string; +}; + // @public export interface CustomSetupBase { type: "undefined"; @@ -1442,14 +1626,13 @@ export interface DataFlowDebugSessionInfo { } // @public -export class DataFlowDebugSessionOperation { - constructor(client: ArtifactsClientContext); +export interface DataFlowDebugSessionOperation { addDataFlow(request: DataFlowDebugPackage, options?: coreHttp.OperationOptions): Promise; createDataFlowDebugSession(request: CreateDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions): Promise>; deleteDataFlowDebugSession(request: DeleteDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions): Promise; executeCommand(request: DataFlowDebugCommandRequest, options?: coreHttp.OperationOptions): Promise>; listQueryDataFlowDebugSessionsByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; - } 
+} // @public export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse = QueryDataFlowDebugSessionsResponse & { @@ -1516,8 +1699,7 @@ export interface DataFlowListResponse { } // @public -export class DataFlowOperation { - constructor(client: ArtifactsClientContext); +export interface DataFlowOperation { createOrUpdateDataFlow(dataFlowName: string, dataFlow: DataFlowResource, options?: DataFlowCreateOrUpdateDataFlowOptionalParams): Promise>; deleteDataFlow(dataFlowName: string, options?: coreHttp.OperationOptions): Promise>; getDataFlow(dataFlowName: string, options?: DataFlowGetDataFlowOptionalParams): Promise; @@ -1544,11 +1726,15 @@ export type DataFlowResource = SubResource & { // @public export type DataFlowSink = Transformation & { dataset?: DatasetReference; + linkedService?: LinkedServiceReference; + schemaLinkedService?: LinkedServiceReference; }; // @public export type DataFlowSource = Transformation & { dataset?: DatasetReference; + linkedService?: LinkedServiceReference; + schemaLinkedService?: LinkedServiceReference; }; // @public @@ -1599,7 +1785,7 @@ export interface Dataset { }; schema?: any; structure?: any; - type: "Avro" | "Parquet" | "DelimitedText" | "Json" | "Orc" | "Binary" | "AzureTable" | "AzureSqlTable" | "AzureSqlMITable" | "AzureSqlDWTable" | "CassandraTable" | "CustomDataset" | "CosmosDbSqlApiCollection" | "DocumentDbCollection" | "DynamicsEntity" | "DynamicsCrmEntity" | "CommonDataServiceForAppsEntity" | "Office365Table" | "MongoDbCollection" | "MongoDbV2Collection" | "CosmosDbMongoDbApiCollection" | "ODataResource" | "OracleTable" | "TeradataTable" | "AzureMySqlTable" | "AmazonRedshiftTable" | "Db2Table" | "RelationalTable" | "InformixTable" | "OdbcTable" | "MySqlTable" | "PostgreSqlTable" | "MicrosoftAccessTable" | "SalesforceObject" | "SalesforceServiceCloudObject" | "SybaseTable" | "SapBwCube" | "SapCloudForCustomerResource" | "SapEccResource" | "SapHanaTable" | "SapOpenHubTable" | "SqlServerTable" | 
"RestResource" | "SapTableResource" | "WebTable" | "AzureSearchIndex" | "AmazonMWSObject" | "AzurePostgreSqlTable" | "ConcurObject" | "CouchbaseTable" | "DrillTable" | "EloquaObject" | "GoogleBigQueryObject" | "GreenplumTable" | "HBaseObject" | "HiveObject" | "HubspotObject" | "ImpalaObject" | "JiraObject" | "MagentoObject" | "MariaDBTable" | "AzureMariaDBTable" | "MarketoObject" | "PaypalObject" | "PhoenixObject" | "PrestoObject" | "QuickBooksObject" | "ServiceNowObject" | "ShopifyObject" | "SparkObject" | "SquareObject" | "XeroObject" | "ZohoObject" | "NetezzaTable" | "VerticaTable" | "SalesforceMarketingCloudObject" | "ResponsysObject" | "DynamicsAXResource" | "OracleServiceCloudObject" | "AzureDataExplorerTable" | "GoogleAdWordsObject"; + type: "AmazonS3Object" | "Avro" | "Excel" | "Parquet" | "DelimitedText" | "Json" | "Xml" | "Orc" | "Binary" | "AzureBlob" | "AzureTable" | "AzureSqlTable" | "AzureSqlMITable" | "AzureSqlDWTable" | "CassandraTable" | "CustomDataset" | "CosmosDbSqlApiCollection" | "DocumentDbCollection" | "DynamicsEntity" | "DynamicsCrmEntity" | "CommonDataServiceForAppsEntity" | "AzureDataLakeStoreFile" | "AzureBlobFSFile" | "Office365Table" | "FileShare" | "MongoDbCollection" | "MongoDbAtlasCollection" | "MongoDbV2Collection" | "CosmosDbMongoDbApiCollection" | "ODataResource" | "OracleTable" | "TeradataTable" | "AzureMySqlTable" | "AmazonRedshiftTable" | "Db2Table" | "RelationalTable" | "InformixTable" | "OdbcTable" | "MySqlTable" | "PostgreSqlTable" | "MicrosoftAccessTable" | "SalesforceObject" | "SalesforceServiceCloudObject" | "SybaseTable" | "SapBwCube" | "SapCloudForCustomerResource" | "SapEccResource" | "SapHanaTable" | "SapOpenHubTable" | "SqlServerTable" | "RestResource" | "SapTableResource" | "WebTable" | "AzureSearchIndex" | "HttpFile" | "AmazonMWSObject" | "AzurePostgreSqlTable" | "ConcurObject" | "CouchbaseTable" | "DrillTable" | "EloquaObject" | "GoogleBigQueryObject" | "GreenplumTable" | "HBaseObject" | "HiveObject" | 
"HubspotObject" | "ImpalaObject" | "JiraObject" | "MagentoObject" | "MariaDBTable" | "AzureMariaDBTable" | "MarketoObject" | "PaypalObject" | "PhoenixObject" | "PrestoObject" | "QuickBooksObject" | "ServiceNowObject" | "ShopifyObject" | "SparkObject" | "SquareObject" | "XeroObject" | "ZohoObject" | "NetezzaTable" | "VerticaTable" | "SalesforceMarketingCloudObject" | "ResponsysObject" | "DynamicsAXResource" | "OracleServiceCloudObject" | "AzureDataExplorerTable" | "GoogleAdWordsObject" | "SnowflakeTable" | "SharePointOnlineListResource" | "AzureDatabricksDeltaLakeDataset"; } // @public @@ -1610,14 +1796,14 @@ export type DatasetBZip2Compression = DatasetCompression & { // @public export interface DatasetCompression { [property: string]: any; - type: "BZip2" | "GZip" | "Deflate" | "ZipDeflate"; + type: "BZip2" | "GZip" | "Deflate" | "ZipDeflate" | "Tar" | "TarGZip"; } // @public export type DatasetCompressionLevel = string; // @public (undocumented) -export type DatasetCompressionUnion = DatasetCompression | DatasetBZip2Compression | DatasetGZipCompression | DatasetDeflateCompression | DatasetZipDeflateCompression; +export type DatasetCompressionUnion = DatasetCompression | DatasetBZip2Compression | DatasetGZipCompression | DatasetDeflateCompression | DatasetZipDeflateCompression | DatasetTarCompression | DatasetTarGZipCompression; // @public export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { @@ -1647,7 +1833,7 @@ export type DatasetDebugResource = SubResourceDebugResource & { // @public export type DatasetDeflateCompression = DatasetCompression & { type: "Deflate"; - level?: DatasetCompressionLevel; + level?: any; }; // @public @@ -1687,7 +1873,7 @@ export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { // @public export type DatasetGZipCompression = DatasetCompression & { type: "GZip"; - level?: DatasetCompressionLevel; + level?: any; }; // @public @@ -1708,8 +1894,7 @@ export interface 
DatasetLocation { export type DatasetLocationUnion = DatasetLocation | AzureBlobStorageLocation | AzureBlobFSLocation | AzureDataLakeStoreLocation | AmazonS3Location | FileServerLocation | AzureFileStorageLocation | GoogleCloudStorageLocation | FtpServerLocation | SftpLocation | HttpServerLocation | HdfsLocation; // @public -export class DatasetOperation { - constructor(client: ArtifactsClientContext); +export interface DatasetOperation { createOrUpdateDataset(datasetName: string, dataset: DatasetResource, options?: DatasetCreateOrUpdateDatasetOptionalParams): Promise>; deleteDataset(datasetName: string, options?: coreHttp.OperationOptions): Promise>; getDataset(datasetName: string, options?: DatasetGetDatasetOptionalParams): Promise; @@ -1752,13 +1937,24 @@ export interface DatasetStorageFormat { // @public (undocumented) export type DatasetStorageFormatUnion = DatasetStorageFormat | TextFormat | JsonFormat | AvroFormat | OrcFormat | ParquetFormat; +// @public +export type DatasetTarCompression = DatasetCompression & { + type: "Tar"; +}; + +// @public +export type DatasetTarGZipCompression = DatasetCompression & { + type: "TarGZip"; + level?: any; +}; + // @public (undocumented) -export type DatasetUnion = Dataset | AvroDataset | ParquetDataset | DelimitedTextDataset | JsonDataset | OrcDataset | BinaryDataset | AzureTableDataset | AzureSqlTableDataset | AzureSqlMITableDataset | AzureSqlDWTableDataset | CassandraTableDataset | CustomDataset | CosmosDbSqlApiCollectionDataset | DocumentDbCollectionDataset | DynamicsEntityDataset | DynamicsCrmEntityDataset | CommonDataServiceForAppsEntityDataset | Office365Dataset | MongoDbCollectionDataset | MongoDbV2CollectionDataset | CosmosDbMongoDbApiCollectionDataset | ODataResourceDataset | OracleTableDataset | TeradataTableDataset | AzureMySqlTableDataset | AmazonRedshiftTableDataset | Db2TableDataset | RelationalTableDataset | InformixTableDataset | OdbcTableDataset | MySqlTableDataset | PostgreSqlTableDataset | 
MicrosoftAccessTableDataset | SalesforceObjectDataset | SalesforceServiceCloudObjectDataset | SybaseTableDataset | SapBwCubeDataset | SapCloudForCustomerResourceDataset | SapEccResourceDataset | SapHanaTableDataset | SapOpenHubTableDataset | SqlServerTableDataset | RestResourceDataset | SapTableResourceDataset | WebTableDataset | AzureSearchIndexDataset | AmazonMWSObjectDataset | AzurePostgreSqlTableDataset | ConcurObjectDataset | CouchbaseTableDataset | DrillTableDataset | EloquaObjectDataset | GoogleBigQueryObjectDataset | GreenplumTableDataset | HBaseObjectDataset | HiveObjectDataset | HubspotObjectDataset | ImpalaObjectDataset | JiraObjectDataset | MagentoObjectDataset | MariaDBTableDataset | AzureMariaDBTableDataset | MarketoObjectDataset | PaypalObjectDataset | PhoenixObjectDataset | PrestoObjectDataset | QuickBooksObjectDataset | ServiceNowObjectDataset | ShopifyObjectDataset | SparkObjectDataset | SquareObjectDataset | XeroObjectDataset | ZohoObjectDataset | NetezzaTableDataset | VerticaTableDataset | SalesforceMarketingCloudObjectDataset | ResponsysObjectDataset | DynamicsAXResourceDataset | OracleServiceCloudObjectDataset | AzureDataExplorerTableDataset | GoogleAdWordsObjectDataset; +export type DatasetUnion = Dataset | AmazonS3Dataset | AvroDataset | ExcelDataset | ParquetDataset | DelimitedTextDataset | JsonDataset | XmlDataset | OrcDataset | BinaryDataset | AzureBlobDataset | AzureTableDataset | AzureSqlTableDataset | AzureSqlMITableDataset | AzureSqlDWTableDataset | CassandraTableDataset | CustomDataset | CosmosDbSqlApiCollectionDataset | DocumentDbCollectionDataset | DynamicsEntityDataset | DynamicsCrmEntityDataset | CommonDataServiceForAppsEntityDataset | AzureDataLakeStoreDataset | AzureBlobFSDataset | Office365Dataset | FileShareDataset | MongoDbCollectionDataset | MongoDbAtlasCollectionDataset | MongoDbV2CollectionDataset | CosmosDbMongoDbApiCollectionDataset | ODataResourceDataset | OracleTableDataset | TeradataTableDataset | 
AzureMySqlTableDataset | AmazonRedshiftTableDataset | Db2TableDataset | RelationalTableDataset | InformixTableDataset | OdbcTableDataset | MySqlTableDataset | PostgreSqlTableDataset | MicrosoftAccessTableDataset | SalesforceObjectDataset | SalesforceServiceCloudObjectDataset | SybaseTableDataset | SapBwCubeDataset | SapCloudForCustomerResourceDataset | SapEccResourceDataset | SapHanaTableDataset | SapOpenHubTableDataset | SqlServerTableDataset | RestResourceDataset | SapTableResourceDataset | WebTableDataset | AzureSearchIndexDataset | HttpDataset | AmazonMWSObjectDataset | AzurePostgreSqlTableDataset | ConcurObjectDataset | CouchbaseTableDataset | DrillTableDataset | EloquaObjectDataset | GoogleBigQueryObjectDataset | GreenplumTableDataset | HBaseObjectDataset | HiveObjectDataset | HubspotObjectDataset | ImpalaObjectDataset | JiraObjectDataset | MagentoObjectDataset | MariaDBTableDataset | AzureMariaDBTableDataset | MarketoObjectDataset | PaypalObjectDataset | PhoenixObjectDataset | PrestoObjectDataset | QuickBooksObjectDataset | ServiceNowObjectDataset | ShopifyObjectDataset | SparkObjectDataset | SquareObjectDataset | XeroObjectDataset | ZohoObjectDataset | NetezzaTableDataset | VerticaTableDataset | SalesforceMarketingCloudObjectDataset | ResponsysObjectDataset | DynamicsAXResourceDataset | OracleServiceCloudObjectDataset | AzureDataExplorerTableDataset | GoogleAdWordsObjectDataset | SnowflakeDataset | SharePointOnlineListResourceDataset | AzureDatabricksDeltaLakeDataset; // @public export type DatasetZipDeflateCompression = DatasetCompression & { type: "ZipDeflate"; - level?: DatasetCompressionLevel; + level?: any; }; // @public @@ -1770,6 +1966,7 @@ export type Db2AuthenticationType = string; // @public export type Db2LinkedService = LinkedService & { type: "Db2"; + connectionString?: any; server: any; database: any; authenticationType?: Db2AuthenticationType; @@ -1802,6 +1999,7 @@ export type DeleteActivity = ExecutionActivity & { enableLogging?: any; 
logStorageSettings?: LogStorageSettings; dataset: DatasetReference; + storeSettings?: StoreReadSettingsUnion; }; // @public @@ -1810,9 +2008,6 @@ export interface DeleteDataFlowDebugSessionRequest { sessionId?: string; } -// @public -export type DelimitedTextCompressionCodec = string; - // @public export type DelimitedTextDataset = Dataset & { type: "DelimitedText"; @@ -1820,8 +2015,8 @@ export type DelimitedTextDataset = Dataset & { columnDelimiter?: any; rowDelimiter?: any; encodingName?: any; - compressionCodec?: DelimitedTextCompressionCodec; - compressionLevel?: DatasetCompressionLevel; + compressionCodec?: CompressionCodec; + compressionLevel?: any; quoteChar?: any; escapeChar?: any; firstRowAsHeader?: any; @@ -1832,6 +2027,7 @@ export type DelimitedTextDataset = Dataset & { export type DelimitedTextReadSettings = FormatReadSettings & { type: "DelimitedTextReadSettings"; skipLineCount?: any; + compressionProperties?: CompressionReadSettingsUnion; }; // @public @@ -1846,6 +2042,7 @@ export type DelimitedTextSource = CopySource & { type: "DelimitedTextSource"; storeSettings?: StoreReadSettingsUnion; formatSettings?: DelimitedTextReadSettings; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -1853,6 +2050,8 @@ export type DelimitedTextWriteSettings = FormatWriteSettings & { type: "DelimitedTextWriteSettings"; quoteAllText?: any; fileExtension: any; + maxRowsPerFile?: any; + fileNamePrefix?: any; }; // @public @@ -1892,6 +2091,7 @@ export type DocumentDbCollectionSource = CopySource & { query?: any; nestingSeparator?: any; queryTimeout?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -1959,6 +2159,7 @@ export type DynamicsAXResourceDataset = Dataset & { export type DynamicsAXSource = TabularSource & { type: "DynamicsAXSource"; query?: any; + httpRequestTimeout?: any; }; // @public @@ -1979,7 +2180,7 @@ export type DynamicsCrmLinkedService = LinkedService & { username?: any; password?: SecretBaseUnion; servicePrincipalId?: any; - 
servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + servicePrincipalCredentialType?: any; servicePrincipalCredential?: SecretBaseUnion; encryptedCredential?: any; }; @@ -1996,6 +2197,7 @@ export type DynamicsCrmSink = CopySink & { export type DynamicsCrmSource = CopySource & { type: "DynamicsCrmSource"; query?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -2011,10 +2213,10 @@ export type DynamicsEntityDataset = Dataset & { export type DynamicsLinkedService = LinkedService & { type: "Dynamics"; deploymentType: DynamicsDeploymentType; - hostName?: string; - port?: string; - serviceUri?: string; - organizationName?: string; + hostName?: any; + port?: any; + serviceUri?: any; + organizationName?: any; authenticationType: DynamicsAuthenticationType; username?: any; password?: SecretBaseUnion; @@ -2042,6 +2244,7 @@ export type DynamicsSinkWriteBehavior = string; export type DynamicsSource = CopySource & { type: "DynamicsSource"; query?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -2112,13 +2315,34 @@ export interface EvaluateDataFlowExpressionRequest { // @public export type EventSubscriptionStatus = string; +// @public +export type ExcelDataset = Dataset & { + type: "Excel"; + location?: DatasetLocationUnion; + sheetName?: any; + range?: any; + firstRowAsHeader?: any; + compression?: DatasetCompressionUnion; + nullValue?: any; +}; + +// @public +export type ExcelSource = CopySource & { + type: "ExcelSource"; + storeSettings?: StoreReadSettingsUnion; + additionalColumns?: AdditionalColumns[]; +}; + // @public export type ExecuteDataFlowActivity = ExecutionActivity & { type: "ExecuteDataFlow"; - dataFlow: DataFlowReference; + dataflow: DataFlowReference; staging?: DataFlowStagingInfo; integrationRuntime?: IntegrationRuntimeReference; compute?: ExecuteDataFlowActivityTypePropertiesCompute; + traceLevel?: any; + continueOnError?: any; + runConcurrently?: any; }; // @public @@ -2178,6 +2402,15 @@ export type 
ExecutionActivity = Activity & { // @public (undocumented) export type ExecutionActivityUnion = ExecutionActivity | CopyActivity | HDInsightHiveActivity | HDInsightPigActivity | HDInsightMapReduceActivity | HDInsightStreamingActivity | HDInsightSparkActivity | ExecuteSsisPackageActivity | CustomActivity | SqlServerStoredProcedureActivity | DeleteActivity | AzureDataExplorerCommandActivity | LookupActivity | WebActivity | GetMetadataActivity | AzureMLBatchExecutionActivity | AzureMLUpdateResourceActivity | AzureMLExecutePipelineActivity | DataLakeAnalyticsUsqlActivity | DatabricksNotebookActivity | DatabricksSparkJarActivity | DatabricksSparkPythonActivity | AzureFunctionActivity | ExecuteDataFlowActivity | SynapseNotebookActivity | SynapseSparkJobDefinitionActivity; +// @public +export interface ExportSettings { + [property: string]: any; + type: "SnowflakeExportCopyCommand" | "AzureDatabricksDeltaLakeExportCommand"; +} + +// @public (undocumented) +export type ExportSettingsUnion = ExportSettings | SnowflakeExportCopyCommand | AzureDatabricksDeltaLakeExportCommand; + // @public export interface ExposureControlRequest { featureName?: string; @@ -2219,9 +2452,13 @@ export type FileServerReadSettings = StoreReadSettings & { recursive?: any; wildcardFolderPath?: any; wildcardFileName?: any; + fileListPath?: any; enablePartitionDiscovery?: boolean; + partitionRootPath?: any; + deleteFilesAfterCompletion?: any; modifiedDatetimeStart?: any; modifiedDatetimeEnd?: any; + fileFilter?: any; }; // @public @@ -2229,6 +2466,18 @@ export type FileServerWriteSettings = StoreWriteSettings & { type: "FileServerWriteSettings"; }; +// @public +export type FileShareDataset = Dataset & { + type: "FileShare"; + folderPath?: any; + fileName?: any; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; + format?: DatasetStorageFormatUnion; + fileFilter?: any; + compression?: DatasetCompressionUnion; +}; + // @public export type FileSystemSink = CopySink & { type: "FileSystemSink"; @@ 
-2239,6 +2488,7 @@ export type FileSystemSink = CopySink & { export type FileSystemSource = CopySource & { type: "FileSystemSource"; recursive?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -2263,20 +2513,20 @@ export type ForEachActivity = ControlActivity & { // @public export interface FormatReadSettings { [property: string]: any; - type: "DelimitedTextReadSettings"; + type: "DelimitedTextReadSettings" | "JsonReadSettings" | "XmlReadSettings" | "BinaryReadSettings"; } // @public (undocumented) -export type FormatReadSettingsUnion = FormatReadSettings | DelimitedTextReadSettings; +export type FormatReadSettingsUnion = FormatReadSettings | DelimitedTextReadSettings | JsonReadSettings | XmlReadSettings | BinaryReadSettings; // @public export interface FormatWriteSettings { [property: string]: any; - type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; + type: "AvroWriteSettings" | "OrcWriteSettings" | "ParquetWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; } // @public (undocumented) -export type FormatWriteSettingsUnion = FormatWriteSettings | AvroWriteSettings | DelimitedTextWriteSettings | JsonWriteSettings; +export type FormatWriteSettingsUnion = FormatWriteSettings | AvroWriteSettings | OrcWriteSettings | ParquetWriteSettings | DelimitedTextWriteSettings | JsonWriteSettings; // @public export type FtpAuthenticationType = string; @@ -2287,6 +2537,10 @@ export type FtpReadSettings = StoreReadSettings & { recursive?: any; wildcardFolderPath?: any; wildcardFileName?: any; + enablePartitionDiscovery?: boolean; + partitionRootPath?: any; + deleteFilesAfterCompletion?: any; + fileListPath?: any; useBinaryTransfer?: boolean; }; @@ -2313,6 +2567,8 @@ export type GetMetadataActivity = ExecutionActivity & { type: "GetMetadata"; dataset: DatasetReference; fieldList?: any[]; + storeSettings?: StoreReadSettingsUnion; + formatSettings?: FormatReadSettingsUnion; }; // @public @@ -2421,7 +2677,10 @@ export type 
GoogleCloudStorageReadSettings = StoreReadSettings & { wildcardFolderPath?: any; wildcardFileName?: any; prefix?: any; + fileListPath?: any; enablePartitionDiscovery?: boolean; + partitionRootPath?: any; + deleteFilesAfterCompletion?: any; modifiedDatetimeStart?: any; modifiedDatetimeEnd?: any; }; @@ -2500,10 +2759,13 @@ export type HdfsReadSettings = StoreReadSettings & { recursive?: any; wildcardFolderPath?: any; wildcardFileName?: any; + fileListPath?: any; enablePartitionDiscovery?: boolean; + partitionRootPath?: any; modifiedDatetimeStart?: any; modifiedDatetimeEnd?: any; distcpSettings?: DistcpSettings; + deleteFilesAfterCompletion?: any; }; // @public @@ -2695,6 +2957,17 @@ export type HiveThriftTransportProtocol = string; // @public export type HttpAuthenticationType = string; +// @public +export type HttpDataset = Dataset & { + type: "HttpFile"; + relativeUrl?: any; + requestMethod?: any; + requestBody?: any; + additionalHeaders?: any; + format?: DatasetStorageFormatUnion; + compression?: DatasetCompressionUnion; +}; + // @public export type HttpLinkedService = LinkedService & { type: "HttpServer"; @@ -2715,6 +2988,8 @@ export type HttpReadSettings = StoreReadSettings & { requestBody?: any; additionalHeaders?: any; requestTimeout?: any; + enablePartitionDiscovery?: boolean; + partitionRootPath?: any; }; // @public @@ -2795,6 +3070,15 @@ export type ImpalaSource = TabularSource & { query?: any; }; +// @public +export interface ImportSettings { + [property: string]: any; + type: "AzureDatabricksDeltaLakeImportCommand" | "SnowflakeImportCopyCommand"; +} + +// @public (undocumented) +export type ImportSettingsUnion = ImportSettings | AzureDatabricksDeltaLakeImportCommand | SnowflakeImportCopyCommand; + // @public export type InformixLinkedService = LinkedService & { type: "Informix"; @@ -2912,8 +3196,7 @@ export type IntegrationRuntimesListResponse = IntegrationRuntimeListResponse & { }; // @public -export class IntegrationRuntimesOperation { - 
constructor(client: ArtifactsClientContext); +export interface IntegrationRuntimesOperation { get(integrationRuntimeName: string, options?: coreHttp.OperationOptions): Promise; list(options?: coreHttp.OperationOptions): Promise; } @@ -3004,6 +3287,12 @@ export type JsonFormat = DatasetStorageFormat & { // @public export type JsonFormatFilePattern = string; +// @public +export type JsonReadSettings = FormatReadSettings & { + type: "JsonReadSettings"; + compressionProperties?: CompressionReadSettingsUnion; +}; + // @public export type JsonSink = CopySink & { type: "JsonSink"; @@ -3015,6 +3304,8 @@ export type JsonSink = CopySink & { export type JsonSource = CopySource & { type: "JsonSource"; storeSettings?: StoreReadSettingsUnion; + formatSettings?: JsonReadSettings; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -3116,6 +3407,26 @@ export const enum KnownCellOutputType { Stream = "stream" } +// @public +export const enum KnownCompressionCodec { + // (undocumented) + Bzip2 = "bzip2", + // (undocumented) + Deflate = "deflate", + // (undocumented) + Gzip = "gzip", + // (undocumented) + Lz4 = "lz4", + // (undocumented) + Snappy = "snappy", + // (undocumented) + Tar = "tar", + // (undocumented) + TarGZip = "tarGZip", + // (undocumented) + ZipDeflate = "zipDeflate" +} + // @public export const enum KnownCopyBehaviorType { // (undocumented) @@ -3162,22 +3473,6 @@ export const enum KnownDb2AuthenticationType { Basic = "Basic" } -// @public -export const enum KnownDelimitedTextCompressionCodec { - // (undocumented) - Bzip2 = "bzip2", - // (undocumented) - Deflate = "deflate", - // (undocumented) - Gzip = "gzip", - // (undocumented) - Lz4 = "lz4", - // (undocumented) - Snappy = "snappy", - // (undocumented) - ZipDeflate = "zipDeflate" -} - // @public export const enum KnownDependencyCondition { // (undocumented) @@ -3524,6 +3819,8 @@ export const enum KnownOraclePartitionOption { // @public export const enum KnownOrcCompressionCodec { + // (undocumented) + Lzo = 
"lzo", // (undocumented) None = "none", // (undocumented) @@ -3551,7 +3848,7 @@ export const enum KnownParameterType { } // @public -export const enum KnownParquetCompressionCodec { +export const enum KnownParquetCompressionCodecEnum { // (undocumented) Gzip = "gzip", // (undocumented) @@ -3874,6 +4171,16 @@ export const enum KnownSqlConnectionType { SqlPool = "SqlPool" } +// @public +export const enum KnownSqlPartitionOption { + // (undocumented) + DynamicRange = "DynamicRange", + // (undocumented) + None = "None", + // (undocumented) + PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable" +} + // @public export const enum KnownSqlPoolReferenceType { // (undocumented) @@ -3899,6 +4206,8 @@ export const enum KnownSsisPackageLocationType { // (undocumented) InlinePackage = "InlinePackage", // (undocumented) + PackageStore = "PackageStore", + // (undocumented) Ssisdb = "SSISDB" } @@ -3977,7 +4286,9 @@ export const enum KnownTumblingWindowFrequency { // (undocumented) Hour = "Hour", // (undocumented) - Minute = "Minute" + Minute = "Minute", + // (undocumented) + Month = "Month" } // @public @@ -4081,8 +4392,7 @@ export interface LibraryListResponse { } // @public -export class LibraryOperaion { - constructor(client: ArtifactsClientContext); +export interface LibraryOperaion { append(libraryName: string, content: coreHttp.HttpRequestBody, options?: LibraryAppendOptionalParams): Promise; create(libraryName: string, options?: coreHttp.OperationOptions): Promise>; delete(libraryName: string, options?: coreHttp.OperationOptions): Promise>; @@ -4090,7 +4400,7 @@ export class LibraryOperaion { get(libraryName: string, options?: coreHttp.OperationOptions): Promise; getOperationResult(operationId: string, options?: coreHttp.OperationOptions): Promise; list(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; - } +} // @public export interface LibraryRequirements { @@ -4157,7 +4467,7 @@ export interface LinkedService { parameters?: { [propertyName: string]: 
ParameterSpecification; }; - type: "AzureStorage" | "AzureBlobStorage" | "AzureTableStorage" | "AzureSqlDW" | "SqlServer" | "AzureSqlDatabase" | "AzureSqlMI" | "AzureBatch" | "AzureKeyVault" | "CosmosDb" | "Dynamics" | "DynamicsCrm" | "CommonDataServiceForApps" | "HDInsight" | "FileServer" | "AzureFileStorage" | "GoogleCloudStorage" | "Oracle" | "AzureMySql" | "MySql" | "PostgreSql" | "Sybase" | "Db2" | "Teradata" | "AzureML" | "AzureMLService" | "Odbc" | "Informix" | "MicrosoftAccess" | "Hdfs" | "OData" | "Web" | "Cassandra" | "MongoDb" | "MongoDbV2" | "CosmosDbMongoDbApi" | "AzureDataLakeStore" | "AzureBlobFS" | "Office365" | "Salesforce" | "SalesforceServiceCloud" | "SapCloudForCustomer" | "SapEcc" | "SapOpenHub" | "RestService" | "AmazonS3" | "AmazonRedshift" | "CustomDataSource" | "AzureSearch" | "HttpServer" | "FtpServer" | "Sftp" | "SapBW" | "SapHana" | "AmazonMWS" | "AzurePostgreSql" | "Concur" | "Couchbase" | "Drill" | "Eloqua" | "GoogleBigQuery" | "Greenplum" | "HBase" | "Hive" | "Hubspot" | "Impala" | "Jira" | "Magento" | "MariaDB" | "AzureMariaDB" | "Marketo" | "Paypal" | "Phoenix" | "Presto" | "QuickBooks" | "ServiceNow" | "Shopify" | "Spark" | "Square" | "Xero" | "Zoho" | "Vertica" | "Netezza" | "SalesforceMarketingCloud" | "HDInsightOnDemand" | "AzureDataLakeAnalytics" | "AzureDatabricks" | "Responsys" | "DynamicsAX" | "OracleServiceCloud" | "GoogleAdWords" | "SapTable" | "AzureDataExplorer" | "AzureFunction"; + type: "AzureStorage" | "AzureBlobStorage" | "AzureTableStorage" | "AzureSqlDW" | "SqlServer" | "AzureSqlDatabase" | "AzureSqlMI" | "AzureBatch" | "AzureKeyVault" | "CosmosDb" | "Dynamics" | "DynamicsCrm" | "CommonDataServiceForApps" | "HDInsight" | "FileServer" | "AzureFileStorage" | "GoogleCloudStorage" | "Oracle" | "AzureMySql" | "MySql" | "PostgreSql" | "Sybase" | "Db2" | "Teradata" | "AzureML" | "AzureMLService" | "Odbc" | "Informix" | "MicrosoftAccess" | "Hdfs" | "OData" | "Web" | "Cassandra" | "MongoDb" | "MongoDbAtlas" | "MongoDbV2" | 
"CosmosDbMongoDbApi" | "AzureDataLakeStore" | "AzureBlobFS" | "Office365" | "Salesforce" | "SalesforceServiceCloud" | "SapCloudForCustomer" | "SapEcc" | "SapOpenHub" | "RestService" | "AmazonS3" | "AmazonRedshift" | "CustomDataSource" | "AzureSearch" | "HttpServer" | "FtpServer" | "Sftp" | "SapBW" | "SapHana" | "AmazonMWS" | "AzurePostgreSql" | "Concur" | "Couchbase" | "Drill" | "Eloqua" | "GoogleBigQuery" | "Greenplum" | "HBase" | "Hive" | "Hubspot" | "Impala" | "Jira" | "Magento" | "MariaDB" | "AzureMariaDB" | "Marketo" | "Paypal" | "Phoenix" | "Presto" | "QuickBooks" | "ServiceNow" | "Shopify" | "Spark" | "Square" | "Xero" | "Zoho" | "Vertica" | "Netezza" | "SalesforceMarketingCloud" | "HDInsightOnDemand" | "AzureDataLakeAnalytics" | "AzureDatabricks" | "AzureDatabricksDeltaLake" | "Responsys" | "DynamicsAX" | "OracleServiceCloud" | "GoogleAdWords" | "SapTable" | "AzureDataExplorer" | "AzureFunction" | "Snowflake" | "SharePointOnlineList"; } // @public @@ -4215,8 +4525,7 @@ export interface LinkedServiceListResponse { } // @public -export class LinkedServiceOperation { - constructor(client: ArtifactsClientContext); +export interface LinkedServiceOperation { createOrUpdateLinkedService(linkedServiceName: string, linkedService: LinkedServiceResource, options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams): Promise>; deleteLinkedService(linkedServiceName: string, options?: coreHttp.OperationOptions): Promise>; getLinkedService(linkedServiceName: string, options?: LinkedServiceGetLinkedServiceOptionalParams): Promise; @@ -4239,12 +4548,27 @@ export type LinkedServiceResource = SubResource & { }; // @public (undocumented) -export type LinkedServiceUnion = LinkedService | AzureStorageLinkedService | AzureBlobStorageLinkedService | AzureTableStorageLinkedService | AzureSqlDWLinkedService | SqlServerLinkedService | AzureSqlDatabaseLinkedService | AzureSqlMILinkedService | AzureBatchLinkedService | AzureKeyVaultLinkedService | CosmosDbLinkedService | 
DynamicsLinkedService | DynamicsCrmLinkedService | CommonDataServiceForAppsLinkedService | HDInsightLinkedService | FileServerLinkedService | AzureFileStorageLinkedService | GoogleCloudStorageLinkedService | OracleLinkedService | AzureMySqlLinkedService | MySqlLinkedService | PostgreSqlLinkedService | SybaseLinkedService | Db2LinkedService | TeradataLinkedService | AzureMLLinkedService | AzureMLServiceLinkedService | OdbcLinkedService | InformixLinkedService | MicrosoftAccessLinkedService | HdfsLinkedService | ODataLinkedService | WebLinkedService | CassandraLinkedService | MongoDbLinkedService | MongoDbV2LinkedService | CosmosDbMongoDbApiLinkedService | AzureDataLakeStoreLinkedService | AzureBlobFSLinkedService | Office365LinkedService | SalesforceLinkedService | SalesforceServiceCloudLinkedService | SapCloudForCustomerLinkedService | SapEccLinkedService | SapOpenHubLinkedService | RestServiceLinkedService | AmazonS3LinkedService | AmazonRedshiftLinkedService | CustomDataSourceLinkedService | AzureSearchLinkedService | HttpLinkedService | FtpServerLinkedService | SftpServerLinkedService | SapBWLinkedService | SapHanaLinkedService | AmazonMWSLinkedService | AzurePostgreSqlLinkedService | ConcurLinkedService | CouchbaseLinkedService | DrillLinkedService | EloquaLinkedService | GoogleBigQueryLinkedService | GreenplumLinkedService | HBaseLinkedService | HiveLinkedService | HubspotLinkedService | ImpalaLinkedService | JiraLinkedService | MagentoLinkedService | MariaDBLinkedService | AzureMariaDBLinkedService | MarketoLinkedService | PaypalLinkedService | PhoenixLinkedService | PrestoLinkedService | QuickBooksLinkedService | ServiceNowLinkedService | ShopifyLinkedService | SparkLinkedService | SquareLinkedService | XeroLinkedService | ZohoLinkedService | VerticaLinkedService | NetezzaLinkedService | SalesforceMarketingCloudLinkedService | HDInsightOnDemandLinkedService | AzureDataLakeAnalyticsLinkedService | AzureDatabricksLinkedService | ResponsysLinkedService | 
DynamicsAXLinkedService | OracleServiceCloudLinkedService | GoogleAdWordsLinkedService | SapTableLinkedService | AzureDataExplorerLinkedService | AzureFunctionLinkedService; +export type LinkedServiceUnion = LinkedService | AzureStorageLinkedService | AzureBlobStorageLinkedService | AzureTableStorageLinkedService | AzureSqlDWLinkedService | SqlServerLinkedService | AzureSqlDatabaseLinkedService | AzureSqlMILinkedService | AzureBatchLinkedService | AzureKeyVaultLinkedService | CosmosDbLinkedService | DynamicsLinkedService | DynamicsCrmLinkedService | CommonDataServiceForAppsLinkedService | HDInsightLinkedService | FileServerLinkedService | AzureFileStorageLinkedService | GoogleCloudStorageLinkedService | OracleLinkedService | AzureMySqlLinkedService | MySqlLinkedService | PostgreSqlLinkedService | SybaseLinkedService | Db2LinkedService | TeradataLinkedService | AzureMLLinkedService | AzureMLServiceLinkedService | OdbcLinkedService | InformixLinkedService | MicrosoftAccessLinkedService | HdfsLinkedService | ODataLinkedService | WebLinkedService | CassandraLinkedService | MongoDbLinkedService | MongoDbAtlasLinkedService | MongoDbV2LinkedService | CosmosDbMongoDbApiLinkedService | AzureDataLakeStoreLinkedService | AzureBlobFSLinkedService | Office365LinkedService | SalesforceLinkedService | SalesforceServiceCloudLinkedService | SapCloudForCustomerLinkedService | SapEccLinkedService | SapOpenHubLinkedService | RestServiceLinkedService | AmazonS3LinkedService | AmazonRedshiftLinkedService | CustomDataSourceLinkedService | AzureSearchLinkedService | HttpLinkedService | FtpServerLinkedService | SftpServerLinkedService | SapBWLinkedService | SapHanaLinkedService | AmazonMWSLinkedService | AzurePostgreSqlLinkedService | ConcurLinkedService | CouchbaseLinkedService | DrillLinkedService | EloquaLinkedService | GoogleBigQueryLinkedService | GreenplumLinkedService | HBaseLinkedService | HiveLinkedService | HubspotLinkedService | ImpalaLinkedService | JiraLinkedService | 
MagentoLinkedService | MariaDBLinkedService | AzureMariaDBLinkedService | MarketoLinkedService | PaypalLinkedService | PhoenixLinkedService | PrestoLinkedService | QuickBooksLinkedService | ServiceNowLinkedService | ShopifyLinkedService | SparkLinkedService | SquareLinkedService | XeroLinkedService | ZohoLinkedService | VerticaLinkedService | NetezzaLinkedService | SalesforceMarketingCloudLinkedService | HDInsightOnDemandLinkedService | AzureDataLakeAnalyticsLinkedService | AzureDatabricksLinkedService | AzureDatabricksDeltaLakeLinkedService | ResponsysLinkedService | DynamicsAXLinkedService | OracleServiceCloudLinkedService | GoogleAdWordsLinkedService | SapTableLinkedService | AzureDataExplorerLinkedService | AzureFunctionLinkedService | SnowflakeLinkedService | SharePointOnlineListLinkedService; + +// @public +export interface LogLocationSettings { + linkedServiceName: LinkedServiceReference; + path?: any; +} + +// @public +export interface LogSettings { + copyActivityLogSettings?: CopyActivityLogSettings; + enableCopyActivityLog?: any; + logLocationSettings: LogLocationSettings; +} // @public export interface LogStorageSettings { [property: string]: any; + enableReliableLogging?: any; linkedServiceName: LinkedServiceReference; + logLevel?: any; path?: any; } @@ -4363,10 +4687,17 @@ export interface ManagedIdentity { export type ManagedIntegrationRuntime = IntegrationRuntime & { type: "Managed"; readonly state?: IntegrationRuntimeState; + managedVirtualNetwork?: ManagedVirtualNetworkReference; computeProperties?: IntegrationRuntimeComputeProperties; ssisProperties?: IntegrationRuntimeSsisProperties; }; +// @public +export interface ManagedVirtualNetworkReference { + referenceName: string; + type: "ManagedVirtualNetworkReference"; +} + // @public export interface ManagedVirtualNetworkSettings { allowedAadTenantIdsForLinking?: string[]; @@ -4448,6 +4779,7 @@ export type MicrosoftAccessSink = CopySink & { export type MicrosoftAccessSource = CopySource & { type: 
"MicrosoftAccessSource"; query?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -4456,6 +4788,29 @@ export type MicrosoftAccessTableDataset = Dataset & { tableName?: any; }; +// @public +export type MongoDbAtlasCollectionDataset = Dataset & { + type: "MongoDbAtlasCollection"; + collection: any; +}; + +// @public +export type MongoDbAtlasLinkedService = LinkedService & { + type: "MongoDbAtlas"; + connectionString: any; + database: any; +}; + +// @public +export type MongoDbAtlasSource = CopySource & { + type: "MongoDbAtlasSource"; + filter?: any; + cursorMethods?: MongoDbCursorMethodsProperties; + batchSize?: any; + queryTimeout?: any; + additionalColumns?: AdditionalColumns[]; +}; + // @public export type MongoDbAuthenticationType = string; @@ -4493,6 +4848,7 @@ export type MongoDbLinkedService = LinkedService & { export type MongoDbSource = CopySource & { type: "MongoDbSource"; query?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -4515,6 +4871,7 @@ export type MongoDbV2Source = CopySource & { cursorMethods?: MongoDbCursorMethodsProperties; batchSize?: any; queryTimeout?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -4524,7 +4881,7 @@ export type MultiplePipelineTrigger = Trigger & { }; // @public (undocumented) -export type MultiplePipelineTriggerUnion = MultiplePipelineTrigger | ScheduleTrigger | BlobTrigger | BlobEventsTrigger; +export type MultiplePipelineTriggerUnion = MultiplePipelineTrigger | ScheduleTrigger | BlobTrigger | BlobEventsTrigger | CustomEventsTrigger; // @public export type MySqlLinkedService = LinkedService & { @@ -4705,8 +5062,7 @@ export interface NotebookMetadata { } // @public -export class NotebookOperation { - constructor(client: ArtifactsClientContext); +export interface NotebookOperation { createOrUpdateNotebook(notebookName: string, notebook: NotebookResource, options?: NotebookCreateOrUpdateNotebookOptionalParams): Promise>; deleteNotebook(notebookName: string, options?: 
coreHttp.OperationOptions): Promise>; getNotebook(notebookName: string, options?: NotebookGetNotebookOptionalParams): Promise; @@ -4751,6 +5107,7 @@ export type ODataLinkedService = LinkedService & { password?: SecretBaseUnion; tenant?: any; servicePrincipalId?: any; + azureCloudType?: any; aadResourceId?: any; aadServicePrincipalCredentialType?: ODataAadServicePrincipalCredentialType; servicePrincipalKey?: SecretBaseUnion; @@ -4769,6 +5126,8 @@ export type ODataResourceDataset = Dataset & { export type ODataSource = CopySource & { type: "ODataSource"; query?: any; + httpRequestTimeout?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -4893,6 +5252,7 @@ export type OracleSource = CopySource & { queryTimeout?: any; partitionOption?: OraclePartitionOption; partitionSettings?: OraclePartitionSettings; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -4922,12 +5282,21 @@ export type OrcFormat = DatasetStorageFormat & { export type OrcSink = CopySink & { type: "OrcSink"; storeSettings?: StoreWriteSettingsUnion; + formatSettings?: OrcWriteSettings; }; // @public export type OrcSource = CopySource & { type: "OrcSource"; storeSettings?: StoreReadSettingsUnion; + additionalColumns?: AdditionalColumns[]; +}; + +// @public +export type OrcWriteSettings = FormatWriteSettings & { + type: "OrcWriteSettings"; + maxRowsPerFile?: any; + fileNamePrefix?: any; }; // @public @@ -4940,13 +5309,13 @@ export interface ParameterSpecification { export type ParameterType = string; // @public -export type ParquetCompressionCodec = string; +export type ParquetCompressionCodecEnum = string; // @public export type ParquetDataset = Dataset & { type: "Parquet"; location?: DatasetLocationUnion; - compressionCodec?: ParquetCompressionCodec; + compressionCodec?: any; }; // @public @@ -4958,12 +5327,21 @@ export type ParquetFormat = DatasetStorageFormat & { export type ParquetSink = CopySink & { type: "ParquetSink"; storeSettings?: StoreWriteSettingsUnion; + 
formatSettings?: ParquetWriteSettings; }; // @public export type ParquetSource = CopySource & { type: "ParquetSource"; storeSettings?: StoreReadSettingsUnion; + additionalColumns?: AdditionalColumns[]; +}; + +// @public +export type ParquetWriteSettings = FormatWriteSettings & { + type: "ParquetWriteSettings"; + maxRowsPerFile?: any; + fileNamePrefix?: any; }; // @public @@ -5097,8 +5475,7 @@ export interface PipelineListResponse { } // @public -export class PipelineOperation { - constructor(client: ArtifactsClientContext); +export interface PipelineOperation { createOrUpdatePipeline(pipelineName: string, pipeline: PipelineResource, options?: PipelineCreateOrUpdatePipelineOptionalParams): Promise>; createPipelineRun(pipelineName: string, options?: PipelineCreatePipelineRunOptionalParams): Promise; deletePipeline(pipelineName: string, options?: coreHttp.OperationOptions): Promise>; @@ -5176,8 +5553,7 @@ export interface PipelineRunInvokedBy { } // @public -export class PipelineRunOperation { - constructor(client: ArtifactsClientContext); +export interface PipelineRunOperation { cancelPipelineRun(runId: string, options?: PipelineRunCancelPipelineRunOptionalParams): Promise; getPipelineRun(runId: string, options?: coreHttp.OperationOptions): Promise; queryActivityRuns(pipelineName: string, runId: string, filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions): Promise; @@ -5315,6 +5691,7 @@ export interface QueryDataFlowDebugSessionsResponse { // @public export type QuickBooksLinkedService = LinkedService & { type: "QuickBooks"; + connectionProperties?: any; endpoint: any; companyId: any; consumerKey: any; @@ -5374,6 +5751,7 @@ export interface RedshiftUnloadSettings { export type RelationalSource = CopySource & { type: "RelationalSource"; query?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -5396,10 +5774,10 @@ export type RerunTriggerResource = SubResource & { // @public export type RerunTumblingWindowTrigger = Trigger & { 
type: "RerunTumblingWindowTrigger"; - parentTrigger?: any; + parentTrigger: any; requestedStartTime: Date; requestedEndTime: Date; - maxConcurrency: number; + rerunConcurrency: number; }; // @public @@ -5467,10 +5845,21 @@ export type RestServiceLinkedService = LinkedService & { servicePrincipalId?: any; servicePrincipalKey?: SecretBaseUnion; tenant?: any; + azureCloudType?: any; aadResourceId?: any; encryptedCredential?: any; }; +// @public +export type RestSink = CopySink & { + type: "RestSink"; + requestMethod?: any; + additionalHeaders?: any; + httpRequestTimeout?: any; + requestInterval?: any; + httpCompressionType?: any; +}; + // @public export type RestSource = CopySource & { type: "RestSource"; @@ -5480,6 +5869,7 @@ export type RestSource = CopySource & { paginationRules?: any; httpRequestTimeout?: any; requestInterval?: any; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -5529,12 +5919,14 @@ export type SalesforceLinkedService = LinkedService & { username?: any; password?: SecretBaseUnion; securityToken?: SecretBaseUnion; + apiVersion?: any; encryptedCredential?: any; }; // @public export type SalesforceMarketingCloudLinkedService = LinkedService & { type: "SalesforceMarketingCloud"; + connectionProperties?: any; clientId: any; clientSecret?: SecretBaseUnion; useEncryptedEndpoints?: any; @@ -5568,6 +5960,7 @@ export type SalesforceServiceCloudLinkedService = LinkedService & { username?: any; password?: SecretBaseUnion; securityToken?: SecretBaseUnion; + apiVersion?: any; extendedProperties?: any; encryptedCredential?: any; }; @@ -5591,6 +5984,7 @@ export type SalesforceServiceCloudSource = CopySource & { type: "SalesforceServiceCloudSource"; query?: any; readBehavior?: SalesforceSourceReadBehavior; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -5655,6 +6049,7 @@ export type SapCloudForCustomerResourceDataset = Dataset & { export type SapCloudForCustomerSink = CopySink & { type: "SapCloudForCustomerSink"; writeBehavior?: 
SapCloudForCustomerSinkWriteBehavior; + httpRequestTimeout?: any; }; // @public @@ -5664,6 +6059,7 @@ export type SapCloudForCustomerSinkWriteBehavior = string; export type SapCloudForCustomerSource = TabularSource & { type: "SapCloudForCustomerSource"; query?: any; + httpRequestTimeout?: any; }; // @public @@ -5685,6 +6081,7 @@ export type SapEccResourceDataset = Dataset & { export type SapEccSource = TabularSource & { type: "SapEccSource"; query?: any; + httpRequestTimeout?: any; }; // @public @@ -5732,8 +6129,12 @@ export type SapOpenHubLinkedService = LinkedService & { systemNumber: any; clientId: any; language?: any; + systemId?: any; userName?: any; password?: SecretBaseUnion; + messageServer?: any; + messageServerService?: any; + logonGroup?: any; encryptedCredential?: any; }; @@ -5742,6 +6143,8 @@ export type SapOpenHubSource = TabularSource & { type: "SapOpenHubSource"; excludeLastRequest?: any; baseRequestId?: any; + customRfcReadTableFunctionModule?: any; + sapDataColumnDelimiter?: any; }; // @public @@ -5799,6 +6202,7 @@ export type SapTableSource = TabularSource & { rfcTableOptions?: any; batchSize?: any; customRfcReadTableFunctionModule?: any; + sapDataColumnDelimiter?: any; partitionOption?: SapTablePartitionOption; partitionSettings?: SapTablePartitionSettings; }; @@ -5912,6 +6316,10 @@ export type SftpReadSettings = StoreReadSettings & { recursive?: any; wildcardFolderPath?: any; wildcardFileName?: any; + enablePartitionDiscovery?: boolean; + partitionRootPath?: any; + fileListPath?: any; + deleteFilesAfterCompletion?: any; modifiedDatetimeStart?: any; modifiedDatetimeEnd?: any; }; @@ -5936,6 +6344,30 @@ export type SftpServerLinkedService = LinkedService & { export type SftpWriteSettings = StoreWriteSettings & { type: "SftpWriteSettings"; operationTimeout?: any; + useTempFileRename?: any; +}; + +// @public +export type SharePointOnlineListLinkedService = LinkedService & { + type: "SharePointOnlineList"; + siteUrl: any; + tenantId: any; + 
servicePrincipalId: any; + servicePrincipalKey: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SharePointOnlineListResourceDataset = Dataset & { + type: "SharePointOnlineListResource"; + listName?: any; +}; + +// @public +export type SharePointOnlineListSource = CopySource & { + type: "SharePointOnlineListSource"; + query?: any; + httpRequestTimeout?: any; }; // @public @@ -5961,6 +6393,12 @@ export type ShopifySource = TabularSource & { query?: any; }; +// @public +export interface SkipErrorFile { + dataInconsistency?: any; + fileMissing?: any; +} + // @public export interface Sku { capacity?: number; @@ -5968,6 +6406,57 @@ export interface Sku { tier?: string; } +// @public +export type SnowflakeDataset = Dataset & { + type: "SnowflakeTable"; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type SnowflakeExportCopyCommand = ExportSettings & { + type: "SnowflakeExportCopyCommand"; + additionalCopyOptions?: { + [propertyName: string]: any; + }; + additionalFormatOptions?: { + [propertyName: string]: any; + }; +}; + +// @public +export type SnowflakeImportCopyCommand = ImportSettings & { + type: "SnowflakeImportCopyCommand"; + additionalCopyOptions?: { + [propertyName: string]: any; + }; + additionalFormatOptions?: { + [propertyName: string]: any; + }; +}; + +// @public +export type SnowflakeLinkedService = LinkedService & { + type: "Snowflake"; + connectionString: any; + password?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type SnowflakeSink = CopySink & { + type: "SnowflakeSink"; + preCopyScript?: any; + importSettings?: SnowflakeImportCopyCommand; +}; + +// @public +export type SnowflakeSource = CopySource & { + type: "SnowflakeSource"; + query?: any; + exportSettings?: SnowflakeExportCopyCommand; +}; + // @public export type SparkAuthenticationType = string; @@ -6090,8 +6579,7 @@ export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse = SparkJ }; 
// @public -export class SparkJobDefinitionOperation { - constructor(client: ArtifactsClientContext); +export interface SparkJobDefinitionOperation { createOrUpdateSparkJobDefinition(sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams): Promise>; debugSparkJobDefinition(sparkJobDefinitionAzureResource: SparkJobDefinitionResource, options?: coreHttp.OperationOptions): Promise>; deleteSparkJobDefinition(sparkJobDefinitionName: string, options?: coreHttp.OperationOptions): Promise>; @@ -6275,6 +6763,8 @@ export type SqlDWSource = TabularSource & { sqlReaderQuery?: any; sqlReaderStoredProcedureName?: any; storedProcedureParameters?: any; + partitionOption?: any; + partitionSettings?: SqlPartitionSettings; }; // @public @@ -6299,8 +6789,20 @@ export type SqlMISource = TabularSource & { [propertyName: string]: StoredProcedureParameter; }; produceAdditionalTypes?: any; + partitionOption?: any; + partitionSettings?: SqlPartitionSettings; }; +// @public +export type SqlPartitionOption = string; + +// @public +export interface SqlPartitionSettings { + partitionColumnName?: any; + partitionLowerBound?: any; + partitionUpperBound?: any; +} + // @public export type SqlPool = TrackedResource & { sku?: Sku; @@ -6347,8 +6849,7 @@ export type SqlPoolsListResponse = SqlPoolInfoListResult & { }; // @public -export class SqlPoolsOperation { - constructor(client: ArtifactsClientContext); +export interface SqlPoolsOperation { get(sqlPoolName: string, options?: coreHttp.OperationOptions): Promise; list(options?: coreHttp.OperationOptions): Promise; } @@ -6429,8 +6930,7 @@ export interface SqlScriptMetadata { } // @public -export class SqlScriptOperation { - constructor(client: ArtifactsClientContext); +export interface SqlScriptOperation { createOrUpdateSqlScript(sqlScriptName: string, sqlScript: SqlScriptResource, options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams): Promise>; 
deleteSqlScript(sqlScriptName: string, options?: coreHttp.OperationOptions): Promise>; getSqlScript(sqlScriptName: string, options?: SqlScriptGetSqlScriptOptionalParams): Promise; @@ -6487,6 +6987,8 @@ export type SqlServerSource = TabularSource & { [propertyName: string]: StoredProcedureParameter; }; produceAdditionalTypes?: any; + partitionOption?: any; + partitionSettings?: SqlPartitionSettings; }; // @public @@ -6527,11 +7029,15 @@ export type SqlSource = TabularSource & { storedProcedureParameters?: { [propertyName: string]: StoredProcedureParameter; }; + isolationLevel?: any; + partitionOption?: any; + partitionSettings?: SqlPartitionSettings; }; // @public export type SquareLinkedService = LinkedService & { type: "Square"; + connectionProperties?: any; host: any; clientId: any; clientSecret?: SecretBaseUnion; @@ -6604,6 +7110,7 @@ export interface SsisObjectMetadataStatusResponse { export interface SsisPackageLocation { accessCredential?: SsisAccessCredential; childPackages?: SsisChildPackage[]; + configurationAccessCredential?: SsisAccessCredential; configurationPath?: any; packageContent?: any; packageLastModifiedDate?: string; @@ -6670,11 +7177,11 @@ export interface StoreWriteSettings { [property: string]: any; copyBehavior?: any; maxConcurrentConnections?: any; - type: "SftpWriteSettings" | "AzureBlobStorageWriteSettings" | "AzureBlobFSWriteSettings" | "AzureDataLakeStoreWriteSettings" | "FileServerWriteSettings"; + type: "SftpWriteSettings" | "AzureBlobStorageWriteSettings" | "AzureBlobFSWriteSettings" | "AzureDataLakeStoreWriteSettings" | "FileServerWriteSettings" | "AzureFileStorageWriteSettings"; } // @public (undocumented) -export type StoreWriteSettingsUnion = StoreWriteSettings | SftpWriteSettings | AzureBlobStorageWriteSettings | AzureBlobFSWriteSettings | AzureDataLakeStoreWriteSettings | FileServerWriteSettings; +export type StoreWriteSettingsUnion = StoreWriteSettings | SftpWriteSettings | AzureBlobStorageWriteSettings | 
AzureBlobFSWriteSettings | AzureDataLakeStoreWriteSettings | FileServerWriteSettings | AzureFileStorageWriteSettings; // @public export type SubResource = AzureEntityResource & {}; @@ -6756,6 +7263,7 @@ export interface SynapseSparkJobReference { export type TabularSource = CopySource & { type: "TabularSource"; queryTimeout?: any; + additionalColumns?: AdditionalColumns[]; }; // @public (undocumented) @@ -6769,6 +7277,20 @@ export type TabularTranslator = CopyTranslator & { collectionReference?: any; mapComplexValuesToString?: any; mappings?: any; + typeConversion?: any; + typeConversionSettings?: TypeConversionSettings; +}; + +// @public +export type TarGZipReadSettings = CompressionReadSettings & { + type: "TarGZipReadSettings"; + preserveCompressionFileNameAsFolder?: any; +}; + +// @public +export type TarReadSettings = CompressionReadSettings & { + type: "TarReadSettings"; + preserveCompressionFileNameAsFolder?: any; }; // @public @@ -6844,7 +7366,7 @@ export interface Trigger { annotations?: any[]; description?: string; readonly runtimeState?: TriggerRuntimeState; - type: "RerunTumblingWindowTrigger" | "MultiplePipelineTrigger" | "ScheduleTrigger" | "BlobTrigger" | "BlobEventsTrigger" | "TumblingWindowTrigger" | "ChainingTrigger"; + type: "RerunTumblingWindowTrigger" | "MultiplePipelineTrigger" | "ScheduleTrigger" | "BlobTrigger" | "BlobEventsTrigger" | "CustomEventsTrigger" | "TumblingWindowTrigger" | "ChainingTrigger"; } // @public @@ -6920,8 +7442,7 @@ export interface TriggerListResponse { } // @public -export class TriggerOperation { - constructor(client: ArtifactsClientContext); +export interface TriggerOperation { createOrUpdateTrigger(triggerName: string, trigger: TriggerResource, options?: TriggerCreateOrUpdateTriggerOptionalParams): Promise>; deleteTrigger(triggerName: string, options?: coreHttp.OperationOptions): Promise>; getEventSubscriptionStatus(triggerName: string, options?: coreHttp.OperationOptions): Promise; @@ -6973,8 +7494,7 @@ export 
interface TriggerRun { } // @public -export class TriggerRunOperation { - constructor(client: ArtifactsClientContext); +export interface TriggerRunOperation { cancelTriggerInstance(triggerName: string, runId: string, options?: coreHttp.OperationOptions): Promise; queryTriggerRunsByWorkspace(filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions): Promise; rerunTriggerInstance(triggerName: string, runId: string, options?: coreHttp.OperationOptions): Promise; @@ -7054,6 +7574,16 @@ export type TumblingWindowTriggerDependencyReference = TriggerDependencyReferenc // @public export type Type = string; +// @public +export interface TypeConversionSettings { + allowDataTruncation?: any; + culture?: any; + dateTimeFormat?: any; + dateTimeOffsetFormat?: any; + timeSpanFormat?: any; + treatBooleanAsNumber?: any; +} + // @public export type UntilActivity = ControlActivity & { type: "Until"; @@ -7117,7 +7647,7 @@ export interface VirtualNetworkProfile { // @public export type WaitActivity = ControlActivity & { type: "Wait"; - waitTimeInSeconds: number; + waitTimeInSeconds: any; }; // @public @@ -7200,6 +7730,7 @@ export type WebLinkedServiceTypePropertiesUnion = WebLinkedServiceTypeProperties // @public export type WebSource = CopySource & { type: "WebSource"; + additionalColumns?: AdditionalColumns[]; }; // @public @@ -7256,8 +7787,7 @@ export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAcces }; // @public -export class WorkspaceGitRepoManagementOperation { - constructor(client: ArtifactsClientContext); +export interface WorkspaceGitRepoManagementOperation { getGitHubAccessToken(gitHubAccessTokenRequest: GitHubAccessTokenRequest, options?: WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams): Promise; } @@ -7275,8 +7805,7 @@ export interface WorkspaceKeyDetails { } // @public -export class WorkspaceOperation { - constructor(client: ArtifactsClientContext); +export interface WorkspaceOperation { get(options?: 
coreHttp.OperationOptions): Promise; } @@ -7304,6 +7833,7 @@ export interface WorkspaceUpdateParameters { // @public export type XeroLinkedService = LinkedService & { type: "Xero"; + connectionProperties?: any; host: any; consumerKey?: SecretBaseUnion; privateKey?: SecretBaseUnion; @@ -7325,9 +7855,43 @@ export type XeroSource = TabularSource & { query?: any; }; +// @public +export type XmlDataset = Dataset & { + type: "Xml"; + location?: DatasetLocationUnion; + encodingName?: any; + nullValue?: any; + compression?: DatasetCompressionUnion; +}; + +// @public +export type XmlReadSettings = FormatReadSettings & { + type: "XmlReadSettings"; + compressionProperties?: CompressionReadSettingsUnion; + validationMode?: any; + detectDataType?: any; + namespaces?: any; + namespacePrefixes?: any; +}; + +// @public +export type XmlSource = CopySource & { + type: "XmlSource"; + storeSettings?: StoreReadSettingsUnion; + formatSettings?: XmlReadSettings; + additionalColumns?: AdditionalColumns[]; +}; + +// @public +export type ZipDeflateReadSettings = CompressionReadSettings & { + type: "ZipDeflateReadSettings"; + preserveZipFileNameAsFolder?: any; +}; + // @public export type ZohoLinkedService = LinkedService & { type: "Zoho"; + connectionProperties?: any; endpoint: any; accessToken?: SecretBaseUnion; useEncryptedEndpoints?: any; diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClient.ts b/sdk/synapse/synapse-artifacts/src/artifactsClient.ts index cc49f763a630..e11660c38a91 100644 --- a/sdk/synapse/synapse-artifacts/src/artifactsClient.ts +++ b/sdk/synapse/synapse-artifacts/src/artifactsClient.ts @@ -7,6 +7,25 @@ */ import * as coreHttp from "@azure/core-http"; +import { + LinkedServiceImpl, + DatasetImpl, + PipelineImpl, + PipelineRunImpl, + TriggerImpl, + TriggerRunImpl, + DataFlowImpl, + DataFlowDebugSessionImpl, + SqlScriptImpl, + SparkJobDefinitionImpl, + NotebookImpl, + WorkspaceImpl, + SqlPoolsImpl, + BigDataPoolsImpl, + IntegrationRuntimesImpl, + LibraryImpl, + 
WorkspaceGitRepoManagementImpl +} from "./operations"; import { LinkedService, Dataset, @@ -25,7 +44,7 @@ import { IntegrationRuntimes, Library, WorkspaceGitRepoManagement -} from "./operations"; +} from "./operationsInterfaces"; import { ArtifactsClientContext } from "./artifactsClientContext"; import { ArtifactsClientOptionalParams } from "./models"; @@ -43,23 +62,23 @@ export class ArtifactsClient extends ArtifactsClientContext { options?: ArtifactsClientOptionalParams ) { super(credentials, endpoint, options); - this.linkedService = new LinkedService(this); - this.dataset = new Dataset(this); - this.pipeline = new Pipeline(this); - this.pipelineRun = new PipelineRun(this); - this.trigger = new Trigger(this); - this.triggerRun = new TriggerRun(this); - this.dataFlow = new DataFlow(this); - this.dataFlowDebugSession = new DataFlowDebugSession(this); - this.sqlScript = new SqlScript(this); - this.sparkJobDefinition = new SparkJobDefinition(this); - this.notebook = new Notebook(this); - this.workspace = new Workspace(this); - this.sqlPools = new SqlPools(this); - this.bigDataPools = new BigDataPools(this); - this.integrationRuntimes = new IntegrationRuntimes(this); - this.library = new Library(this); - this.workspaceGitRepoManagement = new WorkspaceGitRepoManagement(this); + this.linkedService = new LinkedServiceImpl(this); + this.dataset = new DatasetImpl(this); + this.pipeline = new PipelineImpl(this); + this.pipelineRun = new PipelineRunImpl(this); + this.trigger = new TriggerImpl(this); + this.triggerRun = new TriggerRunImpl(this); + this.dataFlow = new DataFlowImpl(this); + this.dataFlowDebugSession = new DataFlowDebugSessionImpl(this); + this.sqlScript = new SqlScriptImpl(this); + this.sparkJobDefinition = new SparkJobDefinitionImpl(this); + this.notebook = new NotebookImpl(this); + this.workspace = new WorkspaceImpl(this); + this.sqlPools = new SqlPoolsImpl(this); + this.bigDataPools = new BigDataPoolsImpl(this); + this.integrationRuntimes = new 
IntegrationRuntimesImpl(this); + this.library = new LibraryImpl(this); + this.workspaceGitRepoManagement = new WorkspaceGitRepoManagementImpl(this); } linkedService: LinkedService; diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts index c57fefd24394..580169496b45 100644 --- a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts +++ b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts @@ -11,7 +11,7 @@ import { ArtifactsClientOptionalParams } from "./models"; import { lroPolicy } from "./lro"; const packageName = "@azure/synapse-artifacts"; -const packageVersion = "1.0.0-beta.3"; +const packageVersion = "1.0.0-beta.4"; export class ArtifactsClientContext extends coreHttp.ServiceClient { endpoint: string; @@ -56,12 +56,17 @@ export class ArtifactsClientContext extends coreHttp.ServiceClient { // When an array of factories is passed in, we'll just add the required factories, // in this case lroPolicy(). It is important to note that passing an array of factories // to a new client, bypasses core-http default factories. Just the pipelines provided will be run. - options.requestPolicyFactories = [lroPolicy(), ...options.requestPolicyFactories]; + options.requestPolicyFactories = [ + lroPolicy(), + ...options.requestPolicyFactories + ]; } else if (options.requestPolicyFactories) { // When we were passed a requestPolicyFactories as a function, we'll create a new one that adds the factories provided // in the options plus the required policies. 
When using this path, the pipelines passed to the client will be added to the // default policies added by core-http - const optionsPolicies = options.requestPolicyFactories([lroPolicy()]) || [lroPolicy()]; + const optionsPolicies = options.requestPolicyFactories([lroPolicy()]) || [ + lroPolicy() + ]; options.requestPolicyFactories = (defaultFactories) => [ ...optionsPolicies, ...defaultFactories @@ -69,7 +74,10 @@ export class ArtifactsClientContext extends coreHttp.ServiceClient { } else { // In case no request policy factories were provided, we'll just need to create a function that will add // the lroPolicy to the default pipelines added by core-http - options.requestPolicyFactories = (defaultFactories) => [lroPolicy(), ...defaultFactories]; + options.requestPolicyFactories = (defaultFactories) => [ + lroPolicy(), + ...defaultFactories + ]; } super(credentials, options); diff --git a/sdk/synapse/synapse-artifacts/src/index.ts b/sdk/synapse/synapse-artifacts/src/index.ts index 027450c31368..350ba0271609 100644 --- a/sdk/synapse/synapse-artifacts/src/index.ts +++ b/sdk/synapse/synapse-artifacts/src/index.ts @@ -41,4 +41,4 @@ export { Notebook as NotebookOperation, LinkedService as LinkedServiceOperation, IntegrationRuntimes as IntegrationRuntimesOperation -} from "./operations"; +} from "./operationsInterfaces"; diff --git a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts index 542685ccb1bd..f54ccc954f04 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts @@ -14,7 +14,11 @@ import { FinalStateVia, LROSYM } from "./models"; -import { OperationSpec, OperationArguments, OperationResponse } from "@azure/core-http"; +import { + OperationSpec, + OperationArguments, + OperationResponse +} from "@azure/core-http"; import { terminalStates } from "./constants"; import { 
SendOperationFn } from "."; @@ -25,11 +29,14 @@ export function createAzureAsyncOperationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); + throw new Error( + "Expected lroData to be defined for Azure-AsyncOperation strategy" + ); } let currentOperation = initialOperation; - let lastKnownPollingUrl = lroData.azureAsyncOperation || lroData.operationLocation; + let lastKnownPollingUrl = + lroData.azureAsyncOperation || lroData.operationLocation; return { isTerminal: () => { @@ -60,12 +67,17 @@ export function createAzureAsyncOperationStrategy( const initialOperationResult = initialOperation.result._response[LROSYM]; const currentOperationResult = currentOperation.result._response[LROSYM]; - if (!shouldPerformFinalGet(initialOperationResult, currentOperationResult)) { + if ( + !shouldPerformFinalGet(initialOperationResult, currentOperationResult) + ) { return currentOperation; } if (initialOperationResult?.requestMethod === "PUT") { - currentOperation = await sendFinalGet(initialOperation, sendOperationFn); + currentOperation = await sendFinalGet( + initialOperation, + sendOperationFn + ); return currentOperation; } @@ -73,20 +85,29 @@ export function createAzureAsyncOperationStrategy( if (initialOperationResult?.location) { switch (finalStateVia) { case "original-uri": - currentOperation = await sendFinalGet(initialOperation, sendOperationFn); + currentOperation = await sendFinalGet( + initialOperation, + sendOperationFn + ); return currentOperation; case "azure-async-operation": return currentOperation; case "location": default: - const location = initialOperationResult.location || currentOperationResult?.location; + const location = + initialOperationResult.location || + currentOperationResult?.location; if (!location) { throw new Error("Couldn't determine final GET URL from location"); } - return await 
sendFinalGet(initialOperation, sendOperationFn, location); + return await sendFinalGet( + initialOperation, + sendOperationFn, + location + ); } } @@ -164,7 +185,10 @@ function getCompositeMappers(responses: { }, {} as { [responseCode: string]: OperationResponse }); } -function shouldPerformFinalGet(initialResult?: LROResponseInfo, currentResult?: LROResponseInfo) { +function shouldPerformFinalGet( + initialResult?: LROResponseInfo, + currentResult?: LROResponseInfo +) { const { status } = currentResult || {}; const { requestMethod: initialRequestMethod, location } = initialResult || {}; if (status && status.toLowerCase() !== "succeeded") { diff --git a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts index dba5f3280cd0..c9404330ed76 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts @@ -52,7 +52,10 @@ export function createBodyPollingStrategy( }; // Execute the polling operation - initialOperation.result = await sendOperation(initialOperation.args, pollingSpec); + initialOperation.result = await sendOperation( + initialOperation.args, + pollingSpec + ); return initialOperation; } }; diff --git a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts index 1cfb103ecacc..6a763e37ecd1 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts @@ -16,7 +16,9 @@ export function createLocationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); + throw new Error( + "Expected lroData to be defined for Azure-AsyncOperation strategy" + ); } let currentOperation = initialOperation; @@ -57,7 +59,8 @@ export function createLocationStrategy( 
const result = await sendOperationFn(pollingArgs, pollingSpec); // Update latest polling url - lastKnownPollingUrl = result._response[LROSYM]?.location || lastKnownPollingUrl; + lastKnownPollingUrl = + result._response[LROSYM]?.location || lastKnownPollingUrl; // Update lastOperation result currentOperation = { diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts index 94c6d089d5dc..4d18c3b7f0ce 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -29,8 +29,12 @@ class LROPolicy extends BaseRequestPolicy { super(nextPolicy, options); } - public async sendRequest(webResource: WebResource): Promise { - let result: LROOperationResponse = await this._nextPolicy.sendRequest(webResource); + public async sendRequest( + webResource: WebResource + ): Promise { + let result: LROOperationResponse = await this._nextPolicy.sendRequest( + webResource + ); const _lroData = getLROData(result); result[LROSYM] = _lroData; diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts index 3fff20ddaa9c..8c0406cf2468 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts @@ -7,8 +7,19 @@ */ import { Poller } from "@azure/core-lro"; -import { OperationSpec, OperationArguments, delay, RestError } from "@azure/core-http"; -import { BaseResult, LROOperationState, LROOperationStep, FinalStateVia, LROSYM } from "./models"; +import { + OperationSpec, + OperationArguments, + delay, + RestError +} from "@azure/core-http"; +import { + BaseResult, + LROOperationState, + LROOperationStep, + FinalStateVia, + LROSYM +} from "./models"; import { makeOperation } from "./operation"; import { createBodyPollingStrategy } from "./bodyPollingStrategy"; import { createAzureAsyncOperationStrategy } from "./azureAsyncOperationStrategy"; @@ -67,7 +78,11 @@ 
export class LROPoller extends Poller< result: initialOperationResult }; - const pollingStrategy = getPollingStrategy(initialOperation, sendOperation, finalStateVia); + const pollingStrategy = getPollingStrategy( + initialOperation, + sendOperation, + finalStateVia + ); const state: LROOperationState = { // Initial operation will become the last operation @@ -120,7 +135,11 @@ function getPollingStrategy( } if (lroData.azureAsyncOperation || lroData.operationLocation) { - return createAzureAsyncOperationStrategy(initialOperation, sendOperationFn, finalStateVia); + return createAzureAsyncOperationStrategy( + initialOperation, + sendOperationFn, + finalStateVia + ); } if (lroData.location) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/models.ts b/sdk/synapse/synapse-artifacts/src/lro/models.ts index 73502255a4dd..c4b96664f664 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/models.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/models.ts @@ -16,7 +16,10 @@ import { import { PollOperationState, PollOperation } from "@azure/core-lro"; export const LROSYM = Symbol("LROData"); -export type FinalStateVia = "azure-async-operation" | "location" | "original-uri"; +export type FinalStateVia = + | "azure-async-operation" + | "location" + | "original-uri"; export interface LROResponseInfo { requestMethod: HttpMethods; @@ -52,7 +55,8 @@ export interface LROOperationStep { result: TResult; } -export interface LROOperationState extends PollOperationState { +export interface LROOperationState + extends PollOperationState { lastOperation: LROOperationStep; initialOperation: LROOperationStep; pollingStrategy: LROStrategy; diff --git a/sdk/synapse/synapse-artifacts/src/lro/operation.ts b/sdk/synapse/synapse-artifacts/src/lro/operation.ts index dc299b243c55..0860f07df0b7 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/operation.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/operation.ts @@ -49,7 +49,9 @@ async function update( const currentLroData = 
currentResponse.result._response[LROSYM]; if (!currentLroData) { - throw new Error("Expected lroData to be defined for updating LRO operation"); + throw new Error( + "Expected lroData to be defined for updating LRO operation" + ); } if (state.result) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts index 3a6986f5a8fd..e9af4cde5e25 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts @@ -29,10 +29,17 @@ export function shouldDeserializeLRO(finalStateVia?: string) { isInitialRequest = false; } - if (initialOperationInfo.azureAsyncOperation || initialOperationInfo.operationLocation) { + if ( + initialOperationInfo.azureAsyncOperation || + initialOperationInfo.operationLocation + ) { return ( !isInitialRequest && - isAsyncOperationFinalResponse(response, initialOperationInfo, finalStateVia) + isAsyncOperationFinalResponse( + response, + initialOperationInfo, + finalStateVia + ) ); } @@ -70,7 +77,10 @@ function isAsyncOperationFinalResponse( return true; } - if (initialOperationInfo.requestMethod !== "PUT" && !initialOperationInfo.location) { + if ( + initialOperationInfo.requestMethod !== "PUT" && + !initialOperationInfo.location + ) { return true; } diff --git a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts index 63bf143e5f73..16944c3f92d4 100644 --- a/sdk/synapse/synapse-artifacts/src/models/index.ts +++ b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -45,6 +45,7 @@ export type LinkedServiceUnion = | WebLinkedService | CassandraLinkedService | MongoDbLinkedService + | MongoDbAtlasLinkedService | MongoDbV2LinkedService | CosmosDbMongoDbApiLinkedService | AzureDataLakeStoreLinkedService @@ -98,21 +99,28 @@ export type LinkedServiceUnion = | HDInsightOnDemandLinkedService | AzureDataLakeAnalyticsLinkedService | AzureDatabricksLinkedService + | 
AzureDatabricksDeltaLakeLinkedService | ResponsysLinkedService | DynamicsAXLinkedService | OracleServiceCloudLinkedService | GoogleAdWordsLinkedService | SapTableLinkedService | AzureDataExplorerLinkedService - | AzureFunctionLinkedService; + | AzureFunctionLinkedService + | SnowflakeLinkedService + | SharePointOnlineListLinkedService; export type DatasetUnion = | Dataset + | AmazonS3Dataset | AvroDataset + | ExcelDataset | ParquetDataset | DelimitedTextDataset | JsonDataset + | XmlDataset | OrcDataset | BinaryDataset + | AzureBlobDataset | AzureTableDataset | AzureSqlTableDataset | AzureSqlMITableDataset @@ -124,8 +132,12 @@ export type DatasetUnion = | DynamicsEntityDataset | DynamicsCrmEntityDataset | CommonDataServiceForAppsEntityDataset + | AzureDataLakeStoreDataset + | AzureBlobFSDataset | Office365Dataset + | FileShareDataset | MongoDbCollectionDataset + | MongoDbAtlasCollectionDataset | MongoDbV2CollectionDataset | CosmosDbMongoDbApiCollectionDataset | ODataResourceDataset @@ -153,6 +165,7 @@ export type DatasetUnion = | SapTableResourceDataset | WebTableDataset | AzureSearchIndexDataset + | HttpDataset | AmazonMWSObjectDataset | AzurePostgreSqlTableDataset | ConcurObjectDataset @@ -187,7 +200,10 @@ export type DatasetUnion = | DynamicsAXResourceDataset | OracleServiceCloudObjectDataset | AzureDataExplorerTableDataset - | GoogleAdWordsObjectDataset; + | GoogleAdWordsObjectDataset + | SnowflakeDataset + | SharePointOnlineListResourceDataset + | AzureDatabricksDeltaLakeDataset; export type ActivityUnion = | Activity | ControlActivityUnion @@ -204,7 +220,10 @@ export type IntegrationRuntimeUnion = | IntegrationRuntime | ManagedIntegrationRuntime | SelfHostedIntegrationRuntime; -export type SecretBaseUnion = SecretBase | SecureString | AzureKeyVaultSecretReference; +export type SecretBaseUnion = + | SecretBase + | SecureString + | AzureKeyVaultSecretReference; export type DatasetLocationUnion = | DatasetLocation | AzureBlobStorageLocation @@ -230,7 +249,9 @@ 
export type DatasetCompressionUnion = | DatasetBZip2Compression | DatasetGZipCompression | DatasetDeflateCompression - | DatasetZipDeflateCompression; + | DatasetZipDeflateCompression + | DatasetTarCompression + | DatasetTarGZipCompression; export type WebLinkedServiceTypePropertiesUnion = | WebLinkedServiceTypeProperties | WebAnonymousAuthentication @@ -255,19 +276,34 @@ export type StoreWriteSettingsUnion = | AzureBlobStorageWriteSettings | AzureBlobFSWriteSettings | AzureDataLakeStoreWriteSettings - | FileServerWriteSettings; -export type FormatReadSettingsUnion = FormatReadSettings | DelimitedTextReadSettings; + | FileServerWriteSettings + | AzureFileStorageWriteSettings; +export type FormatReadSettingsUnion = + | FormatReadSettings + | DelimitedTextReadSettings + | JsonReadSettings + | XmlReadSettings + | BinaryReadSettings; +export type CompressionReadSettingsUnion = + | CompressionReadSettings + | ZipDeflateReadSettings + | TarReadSettings + | TarGZipReadSettings; export type FormatWriteSettingsUnion = | FormatWriteSettings | AvroWriteSettings + | OrcWriteSettings + | ParquetWriteSettings | DelimitedTextWriteSettings | JsonWriteSettings; export type CopySourceUnion = | CopySource | AvroSource + | ExcelSource | ParquetSource | DelimitedTextSource | JsonSource + | XmlSource | OrcSource | BinarySource | TabularSourceUnion @@ -288,19 +324,25 @@ export type CopySourceUnion = | OracleSource | WebSource | MongoDbSource + | MongoDbAtlasSource | MongoDbV2Source | CosmosDbMongoDbApiSource | Office365Source | AzureDataLakeStoreSource | AzureBlobFSSource - | HttpSource; + | HttpSource + | SnowflakeSource + | AzureDatabricksDeltaLakeSource + | SharePointOnlineListSource; export type CopySinkUnion = | CopySink | DelimitedTextSink | JsonSink | OrcSink + | RestSink | AzurePostgreSqlSink | AzureMySqlSink + | AzureDatabricksDeltaLakeSink | SapCloudForCustomerSink | AzureQueueSink | AzureTableSink @@ -316,6 +358,7 @@ export type CopySinkUnion = | AzureSqlSink | SqlMISink | 
SqlDWSink + | SnowflakeSink | OracleSink | AzureDataLakeStoreSink | AzureBlobFSSink @@ -330,6 +373,14 @@ export type CopySinkUnion = | SalesforceSink | SalesforceServiceCloudSink | CosmosDbMongoDbApiSink; +export type ExportSettingsUnion = + | ExportSettings + | SnowflakeExportCopyCommand + | AzureDatabricksDeltaLakeExportCommand; +export type ImportSettingsUnion = + | ImportSettings + | AzureDatabricksDeltaLakeImportCommand + | SnowflakeImportCopyCommand; export type CopyTranslatorUnion = CopyTranslator | TabularTranslator; export type DependencyReferenceUnion = | DependencyReference @@ -383,7 +434,8 @@ export type MultiplePipelineTriggerUnion = | MultiplePipelineTrigger | ScheduleTrigger | BlobTrigger - | BlobEventsTrigger; + | BlobEventsTrigger + | CustomEventsTrigger; export type TabularSourceUnion = | TabularSource | AzureTableSource @@ -493,6 +545,7 @@ export interface LinkedService { | "Web" | "Cassandra" | "MongoDb" + | "MongoDbAtlas" | "MongoDbV2" | "CosmosDbMongoDbApi" | "AzureDataLakeStore" @@ -546,13 +599,16 @@ export interface LinkedService { | "HDInsightOnDemand" | "AzureDataLakeAnalytics" | "AzureDatabricks" + | "AzureDatabricksDeltaLake" | "Responsys" | "DynamicsAX" | "OracleServiceCloud" | "GoogleAdWords" | "SapTable" | "AzureDataExplorer" - | "AzureFunction"; + | "AzureFunction" + | "Snowflake" + | "SharePointOnlineList"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; /** The integration runtime reference. 
*/ @@ -632,12 +688,16 @@ export interface DatasetListResponse { export interface Dataset { /** Polymorphic discriminator, which specifies the different types this object can be */ type: + | "AmazonS3Object" | "Avro" + | "Excel" | "Parquet" | "DelimitedText" | "Json" + | "Xml" | "Orc" | "Binary" + | "AzureBlob" | "AzureTable" | "AzureSqlTable" | "AzureSqlMITable" @@ -649,8 +709,12 @@ export interface Dataset { | "DynamicsEntity" | "DynamicsCrmEntity" | "CommonDataServiceForAppsEntity" + | "AzureDataLakeStoreFile" + | "AzureBlobFSFile" | "Office365Table" + | "FileShare" | "MongoDbCollection" + | "MongoDbAtlasCollection" | "MongoDbV2Collection" | "CosmosDbMongoDbApiCollection" | "ODataResource" @@ -678,6 +742,7 @@ export interface Dataset { | "SapTableResource" | "WebTable" | "AzureSearchIndex" + | "HttpFile" | "AmazonMWSObject" | "AzurePostgreSqlTable" | "ConcurObject" @@ -712,7 +777,10 @@ export interface Dataset { | "DynamicsAXResource" | "OracleServiceCloudObject" | "AzureDataExplorerTable" - | "GoogleAdWordsObject"; + | "GoogleAdWordsObject" + | "SnowflakeTable" + | "SharePointOnlineListResource" + | "AzureDatabricksDeltaLakeDataset"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; /** Dataset description. */ @@ -1069,6 +1137,7 @@ export interface Trigger { | "ScheduleTrigger" | "BlobTrigger" | "BlobEventsTrigger" + | "CustomEventsTrigger" | "TumblingWindowTrigger" | "ChainingTrigger"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ @@ -2383,7 +2452,12 @@ export interface DatasetSchemaDataElement { /** The format definition of a storage. 
*/ export interface DatasetStorageFormat { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | "ParquetFormat"; + type: + | "TextFormat" + | "JsonFormat" + | "AvroFormat" + | "OrcFormat" + | "ParquetFormat"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; /** Serializer. Type: string (or Expression with resultType string). */ @@ -2395,7 +2469,7 @@ export interface DatasetStorageFormat { /** The compression method used on a dataset. */ export interface DatasetCompression { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "BZip2" | "GZip" | "Deflate" | "ZipDeflate"; + type: "BZip2" | "GZip" | "Deflate" | "ZipDeflate" | "Tar" | "TarGZip"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; } @@ -2465,7 +2539,8 @@ export interface StoreWriteSettings { | "AzureBlobStorageWriteSettings" | "AzureBlobFSWriteSettings" | "AzureDataLakeStoreWriteSettings" - | "FileServerWriteSettings"; + | "FileServerWriteSettings" + | "AzureFileStorageWriteSettings"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; /** The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). */ @@ -2487,7 +2562,19 @@ export interface DistcpSettings { /** Format read settings. */ export interface FormatReadSettings { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "DelimitedTextReadSettings"; + type: + | "DelimitedTextReadSettings" + | "JsonReadSettings" + | "XmlReadSettings" + | "BinaryReadSettings"; + /** Describes unknown properties. The value of an unknown property can be of "any" type. 
*/ + [property: string]: any; +} + +/** Compression read settings. */ +export interface CompressionReadSettings { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "ZipDeflateReadSettings" | "TarReadSettings" | "TarGZipReadSettings"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; } @@ -2495,19 +2582,34 @@ export interface FormatReadSettings { /** Format write settings. */ export interface FormatWriteSettings { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; + type: + | "AvroWriteSettings" + | "OrcWriteSettings" + | "ParquetWriteSettings" + | "DelimitedTextWriteSettings" + | "JsonWriteSettings"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; } +/** Specify the column name and value of additional columns. */ +export interface AdditionalColumns { + /** Additional column name. Type: string (or Expression with resultType string). */ + name?: any; + /** Additional column value. Type: string (or Expression with resultType string). */ + value?: any; +} + /** A copy activity source. 
*/ export interface CopySource { /** Polymorphic discriminator, which specifies the different types this object can be */ type: | "AvroSource" + | "ExcelSource" | "ParquetSource" | "DelimitedTextSource" | "JsonSource" + | "XmlSource" | "OrcSource" | "BinarySource" | "TabularSource" @@ -2550,6 +2652,7 @@ export interface CopySource { | "WebSource" | "CassandraSource" | "MongoDbSource" + | "MongoDbAtlasSource" | "MongoDbV2Source" | "CosmosDbMongoDbApiSource" | "Office365Source" @@ -2590,7 +2693,10 @@ export interface CopySource { | "DynamicsAXSource" | "OracleServiceCloudSource" | "GoogleAdWordsSource" - | "AmazonRedshiftSource"; + | "AmazonRedshiftSource" + | "SnowflakeSource" + | "AzureDatabricksDeltaLakeSource" + | "SharePointOnlineListSource"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; /** Source retry count. Type: integer (or Expression with resultType integer). */ @@ -2608,8 +2714,10 @@ export interface CopySink { | "DelimitedTextSink" | "JsonSink" | "OrcSink" + | "RestSink" | "AzurePostgreSqlSink" | "AzureMySqlSink" + | "AzureDatabricksDeltaLakeSink" | "SapCloudForCustomerSink" | "AzureQueueSink" | "AzureTableSink" @@ -2625,6 +2733,7 @@ export interface CopySink { | "AzureSqlSink" | "SqlMISink" | "SqlDWSink" + | "SnowflakeSink" | "OracleSink" | "AzureDataLakeStoreSink" | "AzureBlobFSSink" @@ -2675,6 +2784,54 @@ export interface RedirectIncompatibleRowSettings { path?: any; } +/** (Deprecated. Please use LogSettings) Log storage settings. */ +export interface LogStorageSettings { + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; + /** Log storage linked service reference. */ + linkedServiceName: LinkedServiceReference; + /** The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). */ + path?: any; + /** Gets or sets the log level, support: Info, Warning. 
Type: string (or Expression with resultType string). */ + logLevel?: any; + /** Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). */ + enableReliableLogging?: any; +} + +/** Log settings. */ +export interface LogSettings { + /** Specifies whether to enable copy activity log. Type: boolean (or Expression with resultType boolean). */ + enableCopyActivityLog?: any; + /** Specifies settings for copy activity log. */ + copyActivityLogSettings?: CopyActivityLogSettings; + /** Log location settings customer needs to provide when enabling log. */ + logLocationSettings: LogLocationSettings; +} + +/** Settings for copy activity log. */ +export interface CopyActivityLogSettings { + /** Gets or sets the log level, support: Info, Warning. Type: string (or Expression with resultType string). */ + logLevel?: any; + /** Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). */ + enableReliableLogging?: any; +} + +/** Log location settings. */ +export interface LogLocationSettings { + /** Log storage linked service reference. */ + linkedServiceName: LinkedServiceReference; + /** The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). */ + path?: any; +} + +/** Skip error file. */ +export interface SkipErrorFile { + /** Skip if file is deleted by other client during copy. Default is true. Type: boolean (or Expression with resultType boolean). */ + fileMissing?: any; + /** Skip if source/sink file changed by other concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). */ + dataInconsistency?: any; +} + /** The settings that will be leveraged for SAP HANA source partitioning. */ export interface SapHanaPartitionSettings { /** The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
*/ @@ -2701,6 +2858,16 @@ export interface StoredProcedureParameter { type?: StoredProcedureParameterType; } +/** The settings that will be leveraged for Sql source partitioning. */ +export interface SqlPartitionSettings { + /** The name of the column in integer or datetime type that will be used for proceeding partitioning. If not specified, the primary key of the table is auto-detected and used as the partition column. Type: string (or Expression with resultType string). */ + partitionColumnName?: any; + /** The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). */ + partitionUpperBound?: any; + /** The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). */ + partitionLowerBound?: any; +} + /** The settings that will be leveraged for Oracle source partitioning. */ export interface OraclePartitionSettings { /** Names of the physical partitions of Oracle table. */ @@ -2755,6 +2922,22 @@ export interface RedshiftUnloadSettings { bucketName: any; } +/** Export command settings. */ +export interface ExportSettings { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "SnowflakeExportCopyCommand" | "AzureDatabricksDeltaLakeExportCommand"; + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; +} + +/** Import command settings. 
*/ +export interface ImportSettings { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureDatabricksDeltaLakeImportCommand" | "SnowflakeImportCopyCommand"; + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; +} + /** PolyBase settings. */ export interface PolybaseSettings { /** Describes unknown properties. The value of an unknown property can be of "any" type. */ @@ -2785,16 +2968,6 @@ export interface DWCopyCommandDefaultValue { defaultValue?: any; } -/** Log storage settings. */ -export interface LogStorageSettings { - /** Describes unknown properties. The value of an unknown property can be of "any" type. */ - [property: string]: any; - /** Log storage linked service reference. */ - linkedServiceName: LinkedServiceReference; - /** The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). */ - path?: any; -} - /** A copy activity translator. */ export interface CopyTranslator { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -2803,6 +2976,22 @@ export interface CopyTranslator { [property: string]: any; } +/** Type conversion settings */ +export interface TypeConversionSettings { + /** Whether to allow data truncation when converting the data. Type: boolean (or Expression with resultType boolean). */ + allowDataTruncation?: any; + /** Whether to treat boolean values as numbers. Type: boolean (or Expression with resultType boolean). */ + treatBooleanAsNumber?: any; + /** The format for DateTime values. Type: string (or Expression with resultType string). */ + dateTimeFormat?: any; + /** The format for DateTimeOffset values. Type: string (or Expression with resultType string). */ + dateTimeOffsetFormat?: any; + /** The format for TimeSpan values. Type: string (or Expression with resultType string). 
*/ + timeSpanFormat?: any; + /** The culture used to convert data from/to string. Type: string (or Expression with resultType string). */ + culture?: any; +} + /** SSIS package location. */ export interface SsisPackageLocation { /** The SSIS package path. Type: string (or Expression with resultType string). */ @@ -2815,6 +3004,8 @@ export interface SsisPackageLocation { accessCredential?: SsisAccessCredential; /** The configuration file of the package execution. Type: string (or Expression with resultType string). */ configurationPath?: any; + /** The configuration file access credential. */ + configurationAccessCredential?: SsisAccessCredential; /** The package name. */ packageName?: string; /** The embedded package content. Type: string (or Expression with resultType string). */ @@ -3104,6 +3295,14 @@ export interface CustomSetupBase { type: "undefined"; } +/** Managed Virtual Network reference type. */ +export interface ManagedVirtualNetworkReference { + /** Managed Virtual Network reference type. */ + type: "ManagedVirtualNetworkReference"; + /** Reference ManagedVirtualNetwork name. */ + referenceName: string; +} + /** The base definition of a linked integration runtime. */ export interface LinkedIntegrationRuntimeType { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -3146,6 +3345,8 @@ export type AzureBlobStorageLinkedService = LinkedService & { servicePrincipalKey?: SecretBaseUnion; /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ tenant?: any; + /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ encryptedCredential?: string; }; @@ -3180,6 +3381,8 @@ export type AzureSqlDWLinkedService = LinkedService & { servicePrincipalKey?: SecretBaseUnion; /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ tenant?: any; + /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ encryptedCredential?: any; }; @@ -3212,6 +3415,8 @@ export type AzureSqlDatabaseLinkedService = LinkedService & { servicePrincipalKey?: SecretBaseUnion; /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ tenant?: any; + /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ encryptedCredential?: any; }; @@ -3230,6 +3435,8 @@ export type AzureSqlMILinkedService = LinkedService & { servicePrincipalKey?: SecretBaseUnion; /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
*/ tenant?: any; + /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ encryptedCredential?: any; }; @@ -3283,13 +3490,13 @@ export type DynamicsLinkedService = LinkedService & { /** The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). */ deploymentType: DynamicsDeploymentType; /** The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */ - hostName?: string; + hostName?: any; /** The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */ - port?: string; + port?: any; /** The URL to the Microsoft Dynamics server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). */ - serviceUri?: string; + serviceUri?: any; /** The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). */ - organizationName?: string; + organizationName?: any; /** The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. 
Type: string (or Expression with resultType string). */ authenticationType: DynamicsAuthenticationType; /** User name to access the Dynamics instance. Type: string (or Expression with resultType string). */ @@ -3328,8 +3535,8 @@ export type DynamicsCrmLinkedService = LinkedService & { password?: SecretBaseUnion; /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */ servicePrincipalId?: any; - /** The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */ - servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + /** A string from ServicePrincipalCredentialEnum or an expression */ + servicePrincipalCredentialType?: any; /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ servicePrincipalCredential?: SecretBaseUnion; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ @@ -3358,8 +3565,8 @@ export type CommonDataServiceForAppsLinkedService = LinkedService & { password?: SecretBaseUnion; /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */ servicePrincipalId?: any; - /** The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. 
Type: string (or Expression with resultType string). */ - servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + /** A string from ServicePrincipalCredentialEnum or an expression */ + servicePrincipalCredentialType?: any; /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ servicePrincipalCredential?: SecretBaseUnion; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ @@ -3412,6 +3619,18 @@ export type AzureFileStorageLinkedService = LinkedService & { userId?: any; /** Password to logon the server. */ password?: SecretBaseUnion; + /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of accountKey in connection string. */ + accountKey?: AzureKeyVaultSecretReference; + /** SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */ + sasUri?: any; + /** The Azure key vault secret reference of sasToken in sas uri. */ + sasToken?: AzureKeyVaultSecretReference; + /** The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). */ + fileShare?: any; + /** The azure file share snapshot version. Type: string (or Expression with resultType string). */ + snapshot?: any; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). */ encryptedCredential?: any; }; @@ -3502,21 +3721,23 @@ export type SybaseLinkedService = LinkedService & { export type Db2LinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Db2"; - /** Server name for connection. Type: string (or Expression with resultType string). */ + /** The connection string. It is mutually exclusive with server, database, authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ server: any; - /** Database name for connection. Type: string (or Expression with resultType string). */ + /** Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ database: any; - /** AuthenticationType to be used for connection. */ + /** AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. */ authenticationType?: Db2AuthenticationType; - /** Username for authentication. Type: string (or Expression with resultType string). */ + /** Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ username?: any; /** Password for authentication. */ password?: SecretBaseUnion; - /** Under where packages are created when querying database. Type: string (or Expression with resultType string). */ + /** Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ packageCollection?: any; - /** Certificate Common Name when TLS is enabled. 
Type: string (or Expression with resultType string). */ + /** Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ certificateCommonName?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ encryptedCredential?: any; }; @@ -3664,6 +3885,8 @@ export type ODataLinkedService = LinkedService & { tenant?: any; /** Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ servicePrincipalId?: any; + /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; /** Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). */ aadResourceId?: any; /** Specify the credential type (key or cert) is used for service principal. */ @@ -3730,6 +3953,16 @@ export type MongoDbLinkedService = LinkedService & { encryptedCredential?: any; }; +/** Linked service for MongoDB Atlas data source. */ +export type MongoDbAtlasLinkedService = LinkedService & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "MongoDbAtlas"; + /** The MongoDB Atlas connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. 
*/ + connectionString: any; + /** The name of the MongoDB Atlas database that you want to access. Type: string (or Expression with resultType string). */ + database: any; +}; + /** Linked service for MongoDB data source. */ export type MongoDbV2LinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -3762,6 +3995,8 @@ export type AzureDataLakeStoreLinkedService = LinkedService & { servicePrincipalKey?: SecretBaseUnion; /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ tenant?: any; + /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; /** Data Lake Store account name. Type: string (or Expression with resultType string). */ accountName?: any; /** Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). */ @@ -3786,6 +4021,8 @@ export type AzureBlobFSLinkedService = LinkedService & { servicePrincipalKey?: SecretBaseUnion; /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ tenant?: any; + /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
*/ encryptedCredential?: any; }; @@ -3816,8 +4053,10 @@ export type SalesforceLinkedService = LinkedService & { username?: any; /** The password for Basic authentication of the Salesforce instance. */ password?: SecretBaseUnion; - /** The security token is required to remotely access Salesforce instance. */ + /** The security token is optional to remotely access Salesforce instance. */ securityToken?: SecretBaseUnion; + /** The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */ + apiVersion?: any; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ encryptedCredential?: any; }; @@ -3832,8 +4071,10 @@ export type SalesforceServiceCloudLinkedService = LinkedService & { username?: any; /** The password for Basic authentication of the Salesforce instance. */ password?: SecretBaseUnion; - /** The security token is required to remotely access Salesforce instance. */ + /** The security token is optional to remotely access Salesforce instance. */ securityToken?: SecretBaseUnion; + /** The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */ + apiVersion?: any; /** Extended properties appended to the connection string. Type: string (or Expression with resultType string). */ extendedProperties?: any; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ @@ -3880,10 +4121,18 @@ export type SapOpenHubLinkedService = LinkedService & { clientId: any; /** Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). */ language?: any; + /** SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). 
*/ + systemId?: any; /** Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). */ userName?: any; /** Password to access the SAP BW server where the open hub destination is located. */ password?: SecretBaseUnion; + /** The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */ + messageServer?: any; + /** The service name or port number of the Message Server. Type: string (or Expression with resultType string). */ + messageServerService?: any; + /** The Logon Group for the SAP System. Type: string (or Expression with resultType string). */ + logonGroup?: any; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ encryptedCredential?: any; }; @@ -3908,6 +4157,8 @@ export type RestServiceLinkedService = LinkedService & { servicePrincipalKey?: SecretBaseUnion; /** The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. */ tenant?: any; + /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; /** The resource you are requesting authorization to use. */ aadResourceId?: any; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ @@ -3918,12 +4169,16 @@ export type RestServiceLinkedService = LinkedService & { export type AmazonS3LinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "AmazonS3"; + /** The authentication type of S3. 
Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string (or Expression with resultType string). */ + authenticationType?: any; /** The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */ accessKeyId?: any; /** The secret access key of the Amazon S3 Identity and Access Management (IAM) user. */ secretAccessKey?: SecretBaseUnion; /** This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */ serviceUrl?: any; + /** The session token for the S3 temporary security credential. */ + sessionToken?: SecretBaseUnion; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ encryptedCredential?: any; }; @@ -4116,6 +4371,8 @@ export type AzurePostgreSqlLinkedService = LinkedService & { export type ConcurLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Concur"; + /** Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. Type: object. */ + connectionProperties?: any; /** Application client_id supplied by Concur App Management. */ clientId: any; /** The user name that you use to access Concur Service. */ @@ -4508,6 +4765,8 @@ export type PrestoLinkedService = LinkedService & { export type QuickBooksLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "QuickBooks"; + /** Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked service. Type: object. 
*/ + connectionProperties?: any; /** The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com) */ endpoint: any; /** The company ID of the QuickBooks company to authorize. */ @@ -4608,6 +4867,8 @@ export type SparkLinkedService = LinkedService & { export type SquareLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Square"; + /** Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. Type: object. */ + connectionProperties?: any; /** The URL of the Square instance. (i.e. mystore.mysquare.com) */ host: any; /** The client ID associated with your Square application. */ @@ -4630,6 +4891,8 @@ export type SquareLinkedService = LinkedService & { export type XeroLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Xero"; + /** Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. Type: object. */ + connectionProperties?: any; /** The endpoint of the Xero server. (i.e. api.xero.com) */ host: any; /** The consumer key associated with the Xero application. */ @@ -4653,6 +4916,8 @@ export type XeroLinkedService = LinkedService & { export type ZohoLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Zoho"; + /** Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. */ + connectionProperties?: any; /** The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private) */ endpoint: any; /** The access token for Zoho authentication. 
*/ @@ -4695,6 +4960,8 @@ export type NetezzaLinkedService = LinkedService & { export type SalesforceMarketingCloudLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "SalesforceMarketingCloud"; + /** Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. */ + connectionProperties?: any; /** The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). */ clientId: any; /** The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). */ @@ -4810,7 +5077,11 @@ export type AzureDatabricksLinkedService = LinkedService & { /** .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */ domain: any; /** Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). */ - accessToken: SecretBaseUnion; + accessToken?: SecretBaseUnion; + /** Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). */ + authentication?: any; + /** Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). */ + workspaceResourceId?: any; /** The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). */ existingClusterId?: any; /** The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string). 
*/ @@ -4827,6 +5098,8 @@ export type AzureDatabricksLinkedService = LinkedService & { newClusterSparkEnvVars?: { [propertyName: string]: any }; /** Additional tags for cluster resources. This property is ignored in instance pool configurations. */ newClusterCustomTags?: { [propertyName: string]: any }; + /** Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string). */ + newClusterLogDestination?: any; /** The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). */ newClusterDriverNodeType?: any; /** User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). */ @@ -4835,6 +5108,22 @@ export type AzureDatabricksLinkedService = LinkedService & { newClusterEnableElasticDisk?: any; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ encryptedCredential?: any; + /** The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). */ + policyId?: any; +}; + +/** Azure Databricks Delta Lake linked service. */ +export type AzureDatabricksDeltaLakeLinkedService = LinkedService & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureDatabricksDeltaLake"; + /** .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */ + domain: any; + /** Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. 
*/ + accessToken: SecretBaseUnion; + /** The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). */ + clusterId?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; /** Responsys linked service. */ @@ -4989,6 +5278,34 @@ export type AzureFunctionLinkedService = LinkedService & { encryptedCredential?: any; }; +/** Snowflake linked service. */ +export type SnowflakeLinkedService = LinkedService & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "Snowflake"; + /** The connection string of snowflake. Type: string, SecureString. */ + connectionString: any; + /** The Azure key vault secret reference of password in connection string. */ + password?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; +}; + +/** SharePoint Online List linked service. */ +export type SharePointOnlineListLinkedService = LinkedService & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "SharePointOnlineList"; + /** The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType string). */ + siteUrl: any; + /** The tenant ID under which your application resides. You can find it from Azure portal Active Directory overview page. Type: string (or Expression with resultType string). */ + tenantId: any; + /** The application (client) ID of your application registered in Azure Active Directory. Make sure to grant SharePoint site permission to this application. 
Type: string (or Expression with resultType string). */ + servicePrincipalId: any; + /** The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ + servicePrincipalKey: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; +}; + /** The resource model definition for an Azure Resource Manager resource with an etag. */ export type AzureEntityResource = Resource & { /** @@ -5009,23 +5326,65 @@ export type TrackedResource = Resource & { location: string; }; +/** A single Amazon Simple Storage Service (S3) object or a set of S3 objects. */ +export type AmazonS3Dataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AmazonS3Object"; + /** The name of the Amazon S3 bucket. Type: string (or Expression with resultType string). */ + bucketName: any; + /** The key of the Amazon S3 object. Type: string (or Expression with resultType string). */ + key?: any; + /** The prefix filter for the S3 object name. Type: string (or Expression with resultType string). */ + prefix?: any; + /** The version for the S3 object. Type: string (or Expression with resultType string). */ + version?: any; + /** The start of S3 object's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeStart?: any; + /** The end of S3 object's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeEnd?: any; + /** The format of files. */ + format?: DatasetStorageFormatUnion; + /** The data compression method used for the Amazon S3 object. */ + compression?: DatasetCompressionUnion; +}; + /** Avro dataset. 
*/ export type AvroDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Avro"; /** The location of the avro storage. */ location?: DatasetLocationUnion; - avroCompressionCodec?: AvroCompressionCodec; + /** A string from AvroCompressionCodecEnum or an expression */ + avroCompressionCodec?: any; avroCompressionLevel?: number; }; +/** Excel dataset. */ +export type ExcelDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "Excel"; + /** The location of the excel storage. */ + location?: DatasetLocationUnion; + /** The sheet of excel file. Type: string (or Expression with resultType string). */ + sheetName?: any; + /** The partial data of one sheet. Type: string (or Expression with resultType string). */ + range?: any; + /** When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */ + firstRowAsHeader?: any; + /** The data compression method used for the json dataset. */ + compression?: DatasetCompressionUnion; + /** The null value string. Type: string (or Expression with resultType string). */ + nullValue?: any; +}; + /** Parquet dataset. */ export type ParquetDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Parquet"; /** The location of the parquet storage. */ location?: DatasetLocationUnion; - compressionCodec?: ParquetCompressionCodec; + /** A string from ParquetCompressionCodecEnum or an expression */ + compressionCodec?: any; }; /** Delimited text dataset. */ @@ -5040,9 +5399,9 @@ export type DelimitedTextDataset = Dataset & { rowDelimiter?: any; /** The code page name of the preferred encoding. If miss, the default value is UTF-8, unless BOM denotes another Unicode encoding. 
Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */ encodingName?: any; - compressionCodec?: DelimitedTextCompressionCodec; + compressionCodec?: CompressionCodec; /** The data compression method used for DelimitedText. */ - compressionLevel?: DatasetCompressionLevel; + compressionLevel?: any; /** The quote character. Type: string (or Expression with resultType string). */ quoteChar?: any; /** The escape character. Type: string (or Expression with resultType string). */ @@ -5065,6 +5424,20 @@ export type JsonDataset = Dataset & { compression?: DatasetCompressionUnion; }; +/** Xml dataset. */ +export type XmlDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "Xml"; + /** The location of the json data storage. */ + location?: DatasetLocationUnion; + /** The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */ + encodingName?: any; + /** The null value string. Type: string (or Expression with resultType string). */ + nullValue?: any; + /** The data compression method used for the json dataset. */ + compression?: DatasetCompressionUnion; +}; + /** ORC dataset. */ export type OrcDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -5084,6 +5457,26 @@ export type BinaryDataset = Dataset & { compression?: DatasetCompressionUnion; }; +/** The Azure Blob storage. 
*/ +export type AzureBlobDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureBlob"; + /** The path of the Azure Blob storage. Type: string (or Expression with resultType string). */ + folderPath?: any; + /** The root of blob path. Type: string (or Expression with resultType string). */ + tableRootLocation?: any; + /** The name of the Azure Blob. Type: string (or Expression with resultType string). */ + fileName?: any; + /** The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeStart?: any; + /** The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeEnd?: any; + /** The format of the Azure Blob storage. */ + format?: DatasetStorageFormatUnion; + /** The data compression method used for the blob storage. */ + compression?: DatasetCompressionUnion; +}; + /** The Azure Table storage dataset. */ export type AzureTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -5186,6 +5579,34 @@ export type CommonDataServiceForAppsEntityDataset = Dataset & { entityName?: any; }; +/** Azure Data Lake Store dataset. */ +export type AzureDataLakeStoreDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureDataLakeStoreFile"; + /** Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). */ + folderPath?: any; + /** The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). */ + fileName?: any; + /** The format of the Data Lake Store. */ + format?: DatasetStorageFormatUnion; + /** The data compression method used for the item(s) in the Azure Data Lake Store. */ + compression?: DatasetCompressionUnion; +}; + +/** The Azure Data Lake Storage Gen2 storage. 
*/ +export type AzureBlobFSDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureBlobFSFile"; + /** The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). */ + folderPath?: any; + /** The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with resultType string). */ + fileName?: any; + /** The format of the Azure Data Lake Storage Gen2 storage. */ + format?: DatasetStorageFormatUnion; + /** The data compression method used for the blob storage. */ + compression?: DatasetCompressionUnion; +}; + /** The Office365 account. */ export type Office365Dataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -5196,6 +5617,26 @@ export type Office365Dataset = Dataset & { predicate?: any; }; +/** An on-premises file system dataset. */ +export type FileShareDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "FileShare"; + /** The path of the on-premises file system. Type: string (or Expression with resultType string). */ + folderPath?: any; + /** The name of the on-premises file system. Type: string (or Expression with resultType string). */ + fileName?: any; + /** The start of file's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeStart?: any; + /** The end of file's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeEnd?: any; + /** The format of the files. */ + format?: DatasetStorageFormatUnion; + /** Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). */ + fileFilter?: any; + /** The data compression method used for the file system. */ + compression?: DatasetCompressionUnion; +}; + /** The MongoDB database dataset. 
*/ export type MongoDbCollectionDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -5204,6 +5645,14 @@ export type MongoDbCollectionDataset = Dataset & { collectionName: any; }; +/** The MongoDB Atlas database dataset. */ +export type MongoDbAtlasCollectionDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "MongoDbAtlasCollection"; + /** The collection name of the MongoDB Atlas database. Type: string (or Expression with resultType string). */ + collection: any; +}; + /** The MongoDB database dataset. */ export type MongoDbV2CollectionDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -5458,6 +5907,28 @@ export type AzureSearchIndexDataset = Dataset & { indexName: any; }; +/** A file in an HTTP web server. */ +export type HttpDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "HttpFile"; + /** The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). */ + relativeUrl?: any; + /** The HTTP method for the HTTP request. Type: string (or Expression with resultType string). */ + requestMethod?: any; + /** The body for the HTTP request. Type: string (or Expression with resultType string). */ + requestBody?: any; + /** + * The headers for the HTTP Request. e.g. request-header-name-1:request-header-value-1 + * ... + * request-header-name-n:request-header-value-n Type: string (or Expression with resultType string). + */ + additionalHeaders?: any; + /** The format of files. */ + format?: DatasetStorageFormatUnion; + /** The data compression method used on files. */ + compression?: DatasetCompressionUnion; +}; + /** Amazon Marketplace Web Service dataset. 
*/ export type AmazonMWSObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -5782,6 +6253,34 @@ export type GoogleAdWordsObjectDataset = Dataset & { tableName?: any; }; +/** The snowflake dataset. */ +export type SnowflakeDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "SnowflakeTable"; + /** The schema name of the Snowflake database. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; + /** The table name of the Snowflake database. Type: string (or Expression with resultType string). */ + table?: any; +}; + +/** The sharepoint online list resource dataset. */ +export type SharePointOnlineListResourceDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "SharePointOnlineListResource"; + /** The name of the SharePoint Online list. Type: string (or Expression with resultType string). */ + listName?: any; +}; + +/** Azure Databricks Delta Lake dataset. */ +export type AzureDatabricksDeltaLakeDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureDatabricksDeltaLakeDataset"; + /** The name of delta table. Type: string (or Expression with resultType string). */ + table?: any; + /** The database name of delta table. Type: string (or Expression with resultType string). */ + database?: any; +}; + /** Base class for all control activities like IfCondition, ForEach , Until. */ export type ControlActivity = Activity & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -5817,13 +6316,13 @@ export type RerunTumblingWindowTrigger = Trigger & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "RerunTumblingWindowTrigger"; /** The parent trigger reference. 
*/ - parentTrigger?: any; + parentTrigger: any; /** The start time for the time period for which restatement is initiated. Only UTC time is currently supported. */ requestedStartTime: Date; /** The end time for the time period for which restatement is initiated. Only UTC time is currently supported. */ requestedEndTime: Date; /** The max number of parallel time windows (ready for execution) for which a rerun is triggered. */ - maxConcurrency: number; + rerunConcurrency: number; }; /** Base class for all triggers that support one to many model for trigger to pipeline. */ @@ -5911,6 +6410,8 @@ export type ManagedIntegrationRuntime = IntegrationRuntime & { * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly state?: IntegrationRuntimeState; + /** Managed Virtual Network reference. */ + managedVirtualNetwork?: ManagedVirtualNetworkReference; /** The compute resource for managed integration runtime. */ computeProperties?: IntegrationRuntimeComputeProperties; /** SSIS properties for managed integration runtime. */ @@ -5949,12 +6450,20 @@ export type AzureKeyVaultSecretReference = SecretBase & { export type DataFlowSource = Transformation & { /** Dataset reference. */ dataset?: DatasetReference; + /** Linked service reference. */ + linkedService?: LinkedServiceReference; + /** Schema linked service reference. */ + schemaLinkedService?: LinkedServiceReference; }; /** Transformation for data flow sink. */ export type DataFlowSink = Transformation & { /** Dataset reference. */ dataset?: DatasetReference; + /** Linked service reference. */ + linkedService?: LinkedServiceReference; + /** Schema linked service reference. */ + schemaLinkedService?: LinkedServiceReference; }; /** The location of azure blob dataset. */ @@ -6106,7 +6615,7 @@ export type DatasetGZipCompression = DatasetCompression & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "GZip"; /** The GZip compression level. 
*/ - level?: DatasetCompressionLevel; + level?: any; }; /** The Deflate compression method used on a dataset. */ @@ -6114,7 +6623,7 @@ export type DatasetDeflateCompression = DatasetCompression & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Deflate"; /** The Deflate compression level. */ - level?: DatasetCompressionLevel; + level?: any; }; /** The ZipDeflate compression method used on a dataset. */ @@ -6122,7 +6631,21 @@ export type DatasetZipDeflateCompression = DatasetCompression & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "ZipDeflate"; /** The ZipDeflate compression level. */ - level?: DatasetCompressionLevel; + level?: any; +}; + +/** The Tar archive method used on a dataset. */ +export type DatasetTarCompression = DatasetCompression & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "Tar"; +}; + +/** The TarGZip compression method used on a dataset. */ +export type DatasetTarGZipCompression = DatasetCompression & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "TarGZip"; + /** The TarGZip compression level. */ + level?: any; }; /** A WebLinkedService that uses anonymous authentication to communicate with an HTTP endpoint. */ @@ -6163,8 +6686,14 @@ export type AzureBlobStorageReadSettings = StoreReadSettings & { wildcardFileName?: any; /** The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). */ prefix?: any; + /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ + fileListPath?: any; /** Indicates whether to enable partition discovery. */ enablePartitionDiscovery?: boolean; + /** Specify the root path where partition discovery starts from. 
Type: string (or Expression with resultType string). */ + partitionRootPath?: any; + /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ + deleteFilesAfterCompletion?: any; /** The start of file's modified datetime. Type: string (or Expression with resultType string). */ modifiedDatetimeStart?: any; /** The end of file's modified datetime. Type: string (or Expression with resultType string). */ @@ -6181,8 +6710,14 @@ export type AzureBlobFSReadSettings = StoreReadSettings & { wildcardFolderPath?: any; /** Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). */ wildcardFileName?: any; + /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ + fileListPath?: any; /** Indicates whether to enable partition discovery. */ enablePartitionDiscovery?: boolean; + /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ + partitionRootPath?: any; + /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ + deleteFilesAfterCompletion?: any; /** The start of file's modified datetime. Type: string (or Expression with resultType string). */ modifiedDatetimeStart?: any; /** The end of file's modified datetime. Type: string (or Expression with resultType string). */ @@ -6199,8 +6734,18 @@ export type AzureDataLakeStoreReadSettings = StoreReadSettings & { wildcardFolderPath?: any; /** ADLS wildcardFileName. Type: string (or Expression with resultType string). */ wildcardFileName?: any; + /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). 
*/ + fileListPath?: any; + /** Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). */ + listAfter?: any; + /** Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). */ + listBefore?: any; /** Indicates whether to enable partition discovery. */ enablePartitionDiscovery?: boolean; + /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ + partitionRootPath?: any; + /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ + deleteFilesAfterCompletion?: any; /** The start of file's modified datetime. Type: string (or Expression with resultType string). */ modifiedDatetimeStart?: any; /** The end of file's modified datetime. Type: string (or Expression with resultType string). */ @@ -6219,8 +6764,14 @@ export type AmazonS3ReadSettings = StoreReadSettings & { wildcardFileName?: any; /** The prefix filter for the S3 object name. Type: string (or Expression with resultType string). */ prefix?: any; + /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ + fileListPath?: any; /** Indicates whether to enable partition discovery. */ enablePartitionDiscovery?: boolean; + /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ + partitionRootPath?: any; + /** Indicates whether the source files need to be deleted after copy completion. Default is false. 
Type: boolean (or Expression with resultType boolean). */ + deleteFilesAfterCompletion?: any; /** The start of file's modified datetime. Type: string (or Expression with resultType string). */ modifiedDatetimeStart?: any; /** The end of file's modified datetime. Type: string (or Expression with resultType string). */ @@ -6237,12 +6788,20 @@ export type FileServerReadSettings = StoreReadSettings & { wildcardFolderPath?: any; /** FileServer wildcardFileName. Type: string (or Expression with resultType string). */ wildcardFileName?: any; + /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ + fileListPath?: any; /** Indicates whether to enable partition discovery. */ enablePartitionDiscovery?: boolean; + /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ + partitionRootPath?: any; + /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ + deleteFilesAfterCompletion?: any; /** The start of file's modified datetime. Type: string (or Expression with resultType string). */ modifiedDatetimeStart?: any; /** The end of file's modified datetime. Type: string (or Expression with resultType string). */ modifiedDatetimeEnd?: any; + /** Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). */ + fileFilter?: any; }; /** Azure File Storage read settings. */ @@ -6255,8 +6814,16 @@ export type AzureFileStorageReadSettings = StoreReadSettings & { wildcardFolderPath?: any; /** Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). */ wildcardFileName?: any; + /** The prefix filter for the Azure File name starting from root path. 
Type: string (or Expression with resultType string). */ + prefix?: any; + /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ + fileListPath?: any; /** Indicates whether to enable partition discovery. */ enablePartitionDiscovery?: boolean; + /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ + partitionRootPath?: any; + /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ + deleteFilesAfterCompletion?: any; /** The start of file's modified datetime. Type: string (or Expression with resultType string). */ modifiedDatetimeStart?: any; /** The end of file's modified datetime. Type: string (or Expression with resultType string). */ @@ -6275,8 +6842,14 @@ export type GoogleCloudStorageReadSettings = StoreReadSettings & { wildcardFileName?: any; /** The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). */ prefix?: any; + /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ + fileListPath?: any; /** Indicates whether to enable partition discovery. */ enablePartitionDiscovery?: boolean; + /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ + partitionRootPath?: any; + /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ + deleteFilesAfterCompletion?: any; /** The start of file's modified datetime. Type: string (or Expression with resultType string). */ modifiedDatetimeStart?: any; /** The end of file's modified datetime. 
Type: string (or Expression with resultType string). */ @@ -6293,6 +6866,14 @@ export type FtpReadSettings = StoreReadSettings & { wildcardFolderPath?: any; /** Ftp wildcardFileName. Type: string (or Expression with resultType string). */ wildcardFileName?: any; + /** Indicates whether to enable partition discovery. */ + enablePartitionDiscovery?: boolean; + /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ + partitionRootPath?: any; + /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ + deleteFilesAfterCompletion?: any; + /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ + fileListPath?: any; /** Specify whether to use binary transfer mode for FTP stores. */ useBinaryTransfer?: boolean; }; @@ -6307,6 +6888,14 @@ export type SftpReadSettings = StoreReadSettings & { wildcardFolderPath?: any; /** Sftp wildcardFileName. Type: string (or Expression with resultType string). */ wildcardFileName?: any; + /** Indicates whether to enable partition discovery. */ + enablePartitionDiscovery?: boolean; + /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ + partitionRootPath?: any; + /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ + fileListPath?: any; + /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ + deleteFilesAfterCompletion?: any; /** The start of file's modified datetime. Type: string (or Expression with resultType string). 
*/ modifiedDatetimeStart?: any; /** The end of file's modified datetime. Type: string (or Expression with resultType string). */ @@ -6325,6 +6914,10 @@ export type HttpReadSettings = StoreReadSettings & { additionalHeaders?: any; /** Specifies the timeout for a HTTP client to get HTTP response from HTTP server. */ requestTimeout?: any; + /** Indicates whether to enable partition discovery. */ + enablePartitionDiscovery?: boolean; + /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ + partitionRootPath?: any; }; /** HDFS read settings. */ @@ -6337,14 +6930,20 @@ export type HdfsReadSettings = StoreReadSettings & { wildcardFolderPath?: any; /** HDFS wildcardFileName. Type: string (or Expression with resultType string). */ wildcardFileName?: any; + /** Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). */ + fileListPath?: any; /** Indicates whether to enable partition discovery. */ enablePartitionDiscovery?: boolean; + /** Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). */ + partitionRootPath?: any; /** The start of file's modified datetime. Type: string (or Expression with resultType string). */ modifiedDatetimeStart?: any; /** The end of file's modified datetime. Type: string (or Expression with resultType string). */ modifiedDatetimeEnd?: any; /** Specifies Distcp-related settings. */ distcpSettings?: DistcpSettings; + /** Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). */ + deleteFilesAfterCompletion?: any; }; /** Sftp write settings. */ @@ -6353,6 +6952,8 @@ export type SftpWriteSettings = StoreWriteSettings & { type: "SftpWriteSettings"; /** Specifies the timeout for writing each chunk to SFTP server. 
Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). */ operationTimeout?: any; + /** Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). */ + useTempFileRename?: any; }; /** Azure blob write settings. */ @@ -6375,6 +6976,8 @@ export type AzureBlobFSWriteSettings = StoreWriteSettings & { export type AzureDataLakeStoreWriteSettings = StoreWriteSettings & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "AzureDataLakeStoreWriteSettings"; + /** Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: integer (or Expression with resultType integer). */ + expiryDateTime?: any; }; /** File server write settings. */ @@ -6383,12 +6986,76 @@ export type FileServerWriteSettings = StoreWriteSettings & { type: "FileServerWriteSettings"; }; +/** Azure File Storage write settings. */ +export type AzureFileStorageWriteSettings = StoreWriteSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureFileStorageWriteSettings"; +}; + /** Delimited text read settings. */ export type DelimitedTextReadSettings = FormatReadSettings & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "DelimitedTextReadSettings"; /** Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). */ skipLineCount?: any; + /** Compression settings. */ + compressionProperties?: CompressionReadSettingsUnion; +}; + +/** Json read settings. 
*/ +export type JsonReadSettings = FormatReadSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "JsonReadSettings"; + /** Compression settings. */ + compressionProperties?: CompressionReadSettingsUnion; +}; + +/** Xml read settings. */ +export type XmlReadSettings = FormatReadSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "XmlReadSettings"; + /** Compression settings. */ + compressionProperties?: CompressionReadSettingsUnion; + /** Indicates what validation method is used when reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). */ + validationMode?: any; + /** Indicates whether type detection is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). */ + detectDataType?: any; + /** Indicates whether namespace is enabled when reading the xml files. Type: boolean (or Expression with resultType boolean). */ + namespaces?: any; + /** Namespace uri to prefix mappings to override the prefixes in column names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). */ + namespacePrefixes?: any; +}; + +/** Binary read settings. */ +export type BinaryReadSettings = FormatReadSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "BinaryReadSettings"; + /** Compression settings. */ + compressionProperties?: CompressionReadSettingsUnion; +}; + +/** The ZipDeflate compression read settings. 
*/ +export type ZipDeflateReadSettings = CompressionReadSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "ZipDeflateReadSettings"; + /** Preserve the zip file name as folder path. Type: boolean (or Expression with resultType boolean). */ + preserveZipFileNameAsFolder?: any; +}; + +/** The Tar compression read settings. */ +export type TarReadSettings = CompressionReadSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "TarReadSettings"; + /** Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). */ + preserveCompressionFileNameAsFolder?: any; +}; + +/** The TarGZip compression read settings. */ +export type TarGZipReadSettings = CompressionReadSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "TarGZipReadSettings"; + /** Preserve the compression file name as folder path. Type: boolean (or Expression with resultType boolean). */ + preserveCompressionFileNameAsFolder?: any; }; /** Avro write settings. */ @@ -6399,6 +7066,30 @@ export type AvroWriteSettings = FormatWriteSettings & { recordName?: string; /** Record namespace in the write result. */ recordNamespace?: string; + /** Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */ + maxRowsPerFile?: any; + /** Specifies the file name pattern _. when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */ + fileNamePrefix?: any; +}; + +/** Orc write settings. */ +export type OrcWriteSettings = FormatWriteSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "OrcWriteSettings"; + /** Limit the written file's row count to be smaller than or equal to the specified count. 
Type: integer (or Expression with resultType integer). */ + maxRowsPerFile?: any; + /** Specifies the file name pattern _. when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */ + fileNamePrefix?: any; +}; + +/** Parquet write settings. */ +export type ParquetWriteSettings = FormatWriteSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "ParquetWriteSettings"; + /** Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */ + maxRowsPerFile?: any; + /** Specifies the file name pattern _. when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */ + fileNamePrefix?: any; }; /** Delimited text write settings. */ @@ -6409,6 +7100,10 @@ export type DelimitedTextWriteSettings = FormatWriteSettings & { quoteAllText?: any; /** The file extension used to create the files. Type: string (or Expression with resultType string). */ fileExtension: any; + /** Limit the written file's row count to be smaller than or equal to the specified count. Type: integer (or Expression with resultType integer). */ + maxRowsPerFile?: any; + /** Specifies the file name pattern _. when copy from non-file based store without partitionOptions. Type: string (or Expression with resultType string). */ + fileNamePrefix?: any; }; /** Json write settings. */ @@ -6425,6 +7120,18 @@ export type AvroSource = CopySource & { type: "AvroSource"; /** Avro store settings. */ storeSettings?: StoreReadSettingsUnion; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; +}; + +/** A copy activity excel source. 
*/ +export type ExcelSource = CopySource & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "ExcelSource"; + /** Excel store settings. */ + storeSettings?: StoreReadSettingsUnion; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity Parquet source. */ @@ -6433,6 +7140,8 @@ export type ParquetSource = CopySource & { type: "ParquetSource"; /** Parquet store settings. */ storeSettings?: StoreReadSettingsUnion; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity DelimitedText source. */ @@ -6443,6 +7152,8 @@ export type DelimitedTextSource = CopySource & { storeSettings?: StoreReadSettingsUnion; /** DelimitedText format settings. */ formatSettings?: DelimitedTextReadSettings; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity Json source. */ @@ -6451,6 +7162,22 @@ export type JsonSource = CopySource & { type: "JsonSource"; /** Json store settings. */ storeSettings?: StoreReadSettingsUnion; + /** Json format settings. */ + formatSettings?: JsonReadSettings; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; +}; + +/** A copy activity Xml source. */ +export type XmlSource = CopySource & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "XmlSource"; + /** Xml store settings. */ + storeSettings?: StoreReadSettingsUnion; + /** Xml format settings. 
*/ + formatSettings?: XmlReadSettings; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity ORC source. */ @@ -6459,6 +7186,8 @@ export type OrcSource = CopySource & { type: "OrcSource"; /** ORC store settings. */ storeSettings?: StoreReadSettingsUnion; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity Binary source. */ @@ -6467,6 +7196,8 @@ export type BinarySource = CopySource & { type: "BinarySource"; /** Binary store settings. */ storeSettings?: StoreReadSettingsUnion; + /** Binary format settings. */ + formatSettings?: BinaryReadSettings; }; /** Copy activity sources of tabular type. */ @@ -6475,6 +7206,8 @@ export type TabularSource = CopySource & { type: "TabularSource"; /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ queryTimeout?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity Azure Blob source. */ @@ -6499,6 +7232,8 @@ export type DocumentDbCollectionSource = CopySource & { nestingSeparator?: any; /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ queryTimeout?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity Azure CosmosDB (SQL API) Collection source. 
*/ @@ -6511,6 +7246,10 @@ export type CosmosDbSqlApiSource = CopySource & { pageSize?: any; /** Preferred regions. Type: array of strings (or Expression with resultType array of strings). */ preferredRegions?: any; + /** Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). */ + detectDatetime?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity Dynamics source. */ @@ -6519,6 +7258,8 @@ export type DynamicsSource = CopySource & { type: "DynamicsSource"; /** FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). */ query?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity Dynamics CRM source. */ @@ -6527,6 +7268,8 @@ export type DynamicsCrmSource = CopySource & { type: "DynamicsCrmSource"; /** FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). */ query?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity Common Data Service for Apps source. */ @@ -6535,6 +7278,8 @@ export type CommonDataServiceForAppsSource = CopySource & { type: "CommonDataServiceForAppsSource"; /** FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). */ query?: any; + /** Specifies the additional columns to be added to source data. 
Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity source for various relational databases. */ @@ -6543,6 +7288,8 @@ export type RelationalSource = CopySource & { type: "RelationalSource"; /** Database query. Type: string (or Expression with resultType string). */ query?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity source for Microsoft Access. */ @@ -6551,6 +7298,8 @@ export type MicrosoftAccessSource = CopySource & { type: "MicrosoftAccessSource"; /** Database query. Type: string (or Expression with resultType string). */ query?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity source for OData source. */ @@ -6559,6 +7308,10 @@ export type ODataSource = CopySource & { type: "ODataSource"; /** OData query. For example, "$top=1". Type: string (or Expression with resultType string). */ query?: any; + /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ + httpRequestTimeout?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity Salesforce Service Cloud source. */ @@ -6569,6 +7322,8 @@ export type SalesforceServiceCloudSource = CopySource & { query?: any; /** The read behavior for the operation. Default is Query. 
*/ readBehavior?: SalesforceSourceReadBehavior; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity Rest service source. */ @@ -6587,6 +7342,8 @@ export type RestSource = CopySource & { httpRequestTimeout?: any; /** The time to await before sending next page request. */ requestInterval?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity file system source. */ @@ -6595,6 +7352,8 @@ export type FileSystemSource = CopySource & { type: "FileSystemSource"; /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ recursive?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity HDFS source. */ @@ -6617,6 +7376,8 @@ export type AzureDataExplorerSource = CopySource & { noTruncation?: any; /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. */ queryTimeout?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity Oracle source. */ @@ -6631,12 +7392,16 @@ export type OracleSource = CopySource & { partitionOption?: OraclePartitionOption; /** The settings that will be leveraged for Oracle source partitioning. */ partitionSettings?: OraclePartitionSettings; + /** Specifies the additional columns to be added to source data. 
Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity source for web page table. */ export type WebSource = CopySource & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "WebSource"; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity source for a MongoDB database. */ @@ -6645,6 +7410,24 @@ export type MongoDbSource = CopySource & { type: "MongoDbSource"; /** Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). */ query?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; +}; + +/** A copy activity source for a MongoDB Atlas database. */ +export type MongoDbAtlasSource = CopySource & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "MongoDbAtlasSource"; + /** Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). */ + filter?: any; + /** Cursor methods for Mongodb query */ + cursorMethods?: MongoDbCursorMethodsProperties; + /** Specifies the number of documents to return in each batch of the response from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). */ + batchSize?: any; + /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
*/ + queryTimeout?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity source for a MongoDB database. */ @@ -6659,6 +7442,8 @@ export type MongoDbV2Source = CopySource & { batchSize?: any; /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ queryTimeout?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity source for a CosmosDB (MongoDB API) database. */ @@ -6673,6 +7458,8 @@ export type CosmosDbMongoDbApiSource = CopySource & { batchSize?: any; /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ queryTimeout?: any; + /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ + additionalColumns?: AdditionalColumns[]; }; /** A copy activity source for an Office 365 service. */ @@ -6721,6 +7508,36 @@ export type HttpSource = CopySource & { httpRequestTimeout?: any; }; +/** A copy activity snowflake source. */ +export type SnowflakeSource = CopySource & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "SnowflakeSource"; + /** Snowflake Sql query. Type: string (or Expression with resultType string). */ + query?: any; + /** Snowflake export settings. */ + exportSettings?: SnowflakeExportCopyCommand; +}; + +/** A copy activity Azure Databricks Delta Lake source. 
*/ +export type AzureDatabricksDeltaLakeSource = CopySource & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureDatabricksDeltaLakeSource"; + /** Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). */ + query?: any; + /** Azure Databricks Delta Lake export settings. */ + exportSettings?: AzureDatabricksDeltaLakeExportCommand; +}; + +/** A copy activity source for sharePoint online list source. */ +export type SharePointOnlineListSource = CopySource & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "SharePointOnlineListSource"; + /** The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). */ + query?: any; + /** The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ + httpRequestTimeout?: any; +}; + /** A copy activity DelimitedText sink. */ export type DelimitedTextSink = CopySink & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -6747,6 +7564,24 @@ export type OrcSink = CopySink & { type: "OrcSink"; /** ORC store settings. */ storeSettings?: StoreWriteSettingsUnion; + /** ORC format settings. */ + formatSettings?: OrcWriteSettings; +}; + +/** A copy activity Rest service Sink. */ +export type RestSink = CopySink & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "RestSink"; + /** The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). */ + requestMethod?: any; + /** The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). 
*/ + additionalHeaders?: any; + /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ + httpRequestTimeout?: any; + /** The time to await before sending next request, in milliseconds */ + requestInterval?: any; + /** Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. */ + httpCompressionType?: any; }; /** A copy activity Azure PostgreSQL sink. */ @@ -6765,12 +7600,24 @@ export type AzureMySqlSink = CopySink & { preCopyScript?: any; }; +/** A copy activity Azure Databricks Delta Lake sink. */ +export type AzureDatabricksDeltaLakeSink = CopySink & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureDatabricksDeltaLakeSink"; + /** SQL pre-copy script. Type: string (or Expression with resultType string). */ + preCopyScript?: any; + /** Azure Databricks Delta Lake import settings. */ + importSettings?: AzureDatabricksDeltaLakeImportCommand; +}; + /** A copy activity SAP Cloud for Customer sink. */ export type SapCloudForCustomerSink = CopySink & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "SapCloudForCustomerSink"; /** The write behavior for the operation. Default is 'Insert'. */ writeBehavior?: SapCloudForCustomerSinkWriteBehavior; + /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ + httpRequestTimeout?: any; }; /** A copy activity Azure Queue sink. 
*/ @@ -6809,6 +7656,8 @@ export type ParquetSink = CopySink & { type: "ParquetSink"; /** Parquet store settings. */ storeSettings?: StoreWriteSettingsUnion; + /** Parquet format settings. */ + formatSettings?: ParquetWriteSettings; }; /** A copy activity Binary sink. */ @@ -6957,6 +7806,16 @@ export type SqlDWSink = CopySink & { tableOption?: any; }; +/** A copy activity snowflake sink. */ +export type SnowflakeSink = CopySink & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "SnowflakeSink"; + /** SQL pre-copy script. Type: string (or Expression with resultType string). */ + preCopyScript?: any; + /** Snowflake import settings. */ + importSettings?: SnowflakeImportCopyCommand; +}; + /** A copy activity Oracle sink. */ export type OracleSink = CopySink & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -7095,6 +7954,46 @@ export type CosmosDbMongoDbApiSink = CopySink & { writeBehavior?: any; }; +/** Snowflake export command settings. */ +export type SnowflakeExportCopyCommand = ExportSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "SnowflakeExportCopyCommand"; + /** Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" } */ + additionalCopyOptions?: { [propertyName: string]: any }; + /** Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" } */ + additionalFormatOptions?: { [propertyName: string]: any }; +}; + +/** Azure Databricks Delta Lake export command settings. 
*/ +export type AzureDatabricksDeltaLakeExportCommand = ExportSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureDatabricksDeltaLakeExportCommand"; + /** Specify the date format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */ + dateFormat?: any; + /** Specify the timestamp format for the csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */ + timestampFormat?: any; +}; + +/** Azure Databricks Delta Lake import command settings. */ +export type AzureDatabricksDeltaLakeImportCommand = ImportSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureDatabricksDeltaLakeImportCommand"; + /** Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */ + dateFormat?: any; + /** Specify the timestamp format for csv in Azure Databricks Delta Lake Copy. Type: string (or Expression with resultType string). */ + timestampFormat?: any; +}; + +/** Snowflake import command settings. */ +export type SnowflakeImportCopyCommand = ImportSettings & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "SnowflakeImportCopyCommand"; + /** Additional copy options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" } */ + additionalCopyOptions?: { [propertyName: string]: any }; + /** Additional format options directly passed to snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). 
Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" } */ + additionalFormatOptions?: { [propertyName: string]: any }; +}; + /** A copy activity tabular translator. */ export type TabularTranslator = CopyTranslator & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -7109,6 +8008,10 @@ export type TabularTranslator = CopyTranslator & { mapComplexValuesToString?: any; /** Column mappings with logical types. Tabular->tabular example: [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Hierarchical->tabular example: [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). */ mappings?: any; + /** Whether to enable the advanced type conversion feature in the Copy activity. Type: boolean (or Expression with resultType boolean). */ + typeConversion?: any; + /** Type conversion settings */ + typeConversionSettings?: TypeConversionSettings; }; /** Trigger referenced dependency. */ @@ -7333,7 +8236,7 @@ export type WaitActivity = ControlActivity & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Wait"; /** Duration in seconds. */ - waitTimeInSeconds: number; + waitTimeInSeconds: any; }; /** This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. */ @@ -7410,7 +8313,7 @@ export type WebHookActivity = ControlActivity & { body?: any; /** Authentication method used for calling the endpoint. 
*/ authentication?: WebActivityAuthentication; - /** When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). */ + /** When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). */ reportStatusOnCallBack?: any; }; @@ -7440,10 +8343,18 @@ export type CopyActivity = ExecutionActivity & { enableSkipIncompatibleRow?: any; /** Redirect incompatible row settings when EnableSkipIncompatibleRow is true. */ redirectIncompatibleRowSettings?: RedirectIncompatibleRowSettings; + /** (Deprecated. Please use LogSettings) Log storage settings customer need to provide when enabling session log. */ + logStorageSettings?: LogStorageSettings; + /** Log settings customer needs provide when enabling log. */ + logSettings?: LogSettings; /** Preserve Rules. */ preserveRules?: any[]; /** Preserve rules. */ preserve?: any[]; + /** Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). */ + validateDataConsistency?: any; + /** Specify the fault tolerance for data consistency. */ + skipErrorFile?: SkipErrorFile; }; /** HDInsight Hive activity type. */ @@ -7610,6 +8521,8 @@ export type CustomActivity = ExecutionActivity & { extendedProperties?: { [propertyName: string]: any }; /** The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). */ retentionTimeInDays?: any; + /** Elevation level and scope for the user, default is nonadmin task. Type: string (or Expression with resultType double). */ + autoUserSpecification?: any; }; /** SQL stored procedure activity type. 
*/ @@ -7638,6 +8551,8 @@ export type DeleteActivity = ExecutionActivity & { logStorageSettings?: LogStorageSettings; /** Delete activity dataset reference. */ dataset: DatasetReference; + /** Delete activity store settings. */ + storeSettings?: StoreReadSettingsUnion; }; /** Azure Data Explorer command activity. */ @@ -7692,6 +8607,10 @@ export type GetMetadataActivity = ExecutionActivity & { dataset: DatasetReference; /** Fields of metadata to get from dataset. */ fieldList?: any[]; + /** GetMetadata activity store settings. */ + storeSettings?: StoreReadSettingsUnion; + /** GetMetadata activity format settings. */ + formatSettings?: FormatReadSettingsUnion; }; /** Azure ML Batch Execution activity. */ @@ -7809,13 +8728,19 @@ export type ExecuteDataFlowActivity = ExecutionActivity & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "ExecuteDataFlow"; /** Data flow reference. */ - dataFlow: DataFlowReference; + dataflow: DataFlowReference; /** Staging info for execute data flow activity. */ staging?: DataFlowStagingInfo; /** The integration runtime reference. */ integrationRuntime?: IntegrationRuntimeReference; /** Compute properties for data flow activity. */ compute?: ExecuteDataFlowActivityTypePropertiesCompute; + /** Trace level setting used for data flow monitoring output. Supported values are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string) */ + traceLevel?: any; + /** Continue on error setting used for data flow execution. Enables processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean) */ + continueOnError?: any; + /** Concurrent run setting used for data flow execution. Allows sinks with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean) */ + runConcurrently?: any; }; /** Execute Synapse notebook activity. 
*/ @@ -7872,6 +8797,20 @@ export type BlobEventsTrigger = MultiplePipelineTrigger & { scope: string; }; +/** Trigger that runs every time a custom event is received. */ +export type CustomEventsTrigger = MultiplePipelineTrigger & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "CustomEventsTrigger"; + /** The event subject must begin with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. */ + subjectBeginsWith?: string; + /** The event subject must end with the pattern provided for trigger to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. */ + subjectEndsWith?: string; + /** The list of event types that cause this trigger to fire. */ + events: any[]; + /** The ARM resource ID of the Azure Event Grid Topic. */ + scope: string; +}; + /** A copy activity Azure Table source. */ export type AzureTableSource = TabularSource & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -7954,6 +8893,8 @@ export type SapCloudForCustomerSource = TabularSource & { type: "SapCloudForCustomerSource"; /** SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). */ query?: any; + /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ + httpRequestTimeout?: any; }; /** A copy activity source for SAP ECC source. */ @@ -7962,6 +8903,8 @@ export type SapEccSource = TabularSource & { type: "SapEccSource"; /** SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). */ query?: any; + /** The timeout (TimeSpan) to get an HTTP response. 
It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ + httpRequestTimeout?: any; }; /** A copy activity source for SAP HANA source. */ @@ -7986,6 +8929,10 @@ export type SapOpenHubSource = TabularSource & { excludeLastRequest?: any; /** The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). */ baseRequestId?: any; + /** Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). */ + customRfcReadTableFunctionModule?: any; + /** The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). */ + sapDataColumnDelimiter?: any; }; /** A copy activity source for SAP Table source. */ @@ -8004,6 +8951,8 @@ export type SapTableSource = TabularSource & { batchSize?: any; /** Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). */ customRfcReadTableFunctionModule?: any; + /** The single character that will be used as delimiter passed to SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with resultType string). */ + sapDataColumnDelimiter?: any; /** The partition mechanism that will be used for SAP table read in parallel. */ partitionOption?: SapTablePartitionOption; /** The settings that will be leveraged for SAP table source partitioning. 
*/ @@ -8022,6 +8971,12 @@ export type SqlSource = TabularSource & { storedProcedureParameters?: { [propertyName: string]: StoredProcedureParameter; }; + /** Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). */ + isolationLevel?: any; + /** The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */ + partitionOption?: any; + /** The settings that will be leveraged for Sql source partitioning. */ + partitionSettings?: SqlPartitionSettings; }; /** A copy activity SQL server source. */ @@ -8038,6 +8993,10 @@ export type SqlServerSource = TabularSource & { }; /** Which additional types to produce. */ produceAdditionalTypes?: any; + /** The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */ + partitionOption?: any; + /** The settings that will be leveraged for Sql source partitioning. */ + partitionSettings?: SqlPartitionSettings; }; /** A copy activity Azure SQL source. */ @@ -8054,6 +9013,10 @@ export type AzureSqlSource = TabularSource & { }; /** Which additional types to produce. */ produceAdditionalTypes?: any; + /** The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */ + partitionOption?: any; + /** The settings that will be leveraged for Sql source partitioning. */ + partitionSettings?: SqlPartitionSettings; }; /** A copy activity Azure SQL Managed Instance source. */ @@ -8070,6 +9033,10 @@ export type SqlMISource = TabularSource & { }; /** Which additional types to produce. */ produceAdditionalTypes?: any; + /** The partition mechanism that will be used for Sql read in parallel. 
Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */ + partitionOption?: any; + /** The settings that will be leveraged for Sql source partitioning. */ + partitionSettings?: SqlPartitionSettings; }; /** A copy activity SQL Data Warehouse source. */ @@ -8082,6 +9049,10 @@ export type SqlDWSource = TabularSource & { sqlReaderStoredProcedureName?: any; /** Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. */ storedProcedureParameters?: any; + /** The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */ + partitionOption?: any; + /** The settings that will be leveraged for Sql source partitioning. */ + partitionSettings?: SqlPartitionSettings; }; /** A copy activity Azure MySQL source. */ @@ -8372,6 +9343,8 @@ export type DynamicsAXSource = TabularSource & { type: "DynamicsAXSource"; /** A query to retrieve data from source. Type: string (or Expression with resultType string). */ query?: any; + /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ + httpRequestTimeout?: any; }; /** A copy activity Oracle Service Cloud source. */ @@ -9082,77 +10055,21 @@ export const enum KnownJsonFormatFilePattern { */ export type JsonFormatFilePattern = string; -/** Known values of {@link DatasetCompressionLevel} that the service accepts. */ -export const enum KnownDatasetCompressionLevel { - Optimal = "Optimal", - Fastest = "Fastest" -} - -/** - * Defines values for DatasetCompressionLevel. 
\ - * {@link KnownDatasetCompressionLevel} can be used interchangeably with DatasetCompressionLevel, - * this enum contains the known values that the service supports. - * ### Know values supported by the service - * **Optimal** \ - * **Fastest** - */ -export type DatasetCompressionLevel = string; - -/** Known values of {@link AvroCompressionCodec} that the service accepts. */ -export const enum KnownAvroCompressionCodec { - None = "none", - Deflate = "deflate", - Snappy = "snappy", - Xz = "xz", - Bzip2 = "bzip2" -} - -/** - * Defines values for AvroCompressionCodec. \ - * {@link KnownAvroCompressionCodec} can be used interchangeably with AvroCompressionCodec, - * this enum contains the known values that the service supports. - * ### Know values supported by the service - * **none** \ - * **deflate** \ - * **snappy** \ - * **xz** \ - * **bzip2** - */ -export type AvroCompressionCodec = string; - -/** Known values of {@link ParquetCompressionCodec} that the service accepts. */ -export const enum KnownParquetCompressionCodec { - None = "none", - Gzip = "gzip", - Snappy = "snappy", - Lzo = "lzo" -} - -/** - * Defines values for ParquetCompressionCodec. \ - * {@link KnownParquetCompressionCodec} can be used interchangeably with ParquetCompressionCodec, - * this enum contains the known values that the service supports. - * ### Know values supported by the service - * **none** \ - * **gzip** \ - * **snappy** \ - * **lzo** - */ -export type ParquetCompressionCodec = string; - -/** Known values of {@link DelimitedTextCompressionCodec} that the service accepts. */ -export const enum KnownDelimitedTextCompressionCodec { +/** Known values of {@link CompressionCodec} that the service accepts. */ +export const enum KnownCompressionCodec { Bzip2 = "bzip2", Gzip = "gzip", Deflate = "deflate", ZipDeflate = "zipDeflate", Snappy = "snappy", - Lz4 = "lz4" + Lz4 = "lz4", + Tar = "tar", + TarGZip = "tarGZip" } /** - * Defines values for DelimitedTextCompressionCodec. 
\ - * {@link KnownDelimitedTextCompressionCodec} can be used interchangeably with DelimitedTextCompressionCodec, + * Defines values for CompressionCodec. \ + * {@link KnownCompressionCodec} can be used interchangeably with CompressionCodec, * this enum contains the known values that the service supports. * ### Know values supported by the service * **bzip2** \ @@ -9160,15 +10077,18 @@ export const enum KnownDelimitedTextCompressionCodec { * **deflate** \ * **zipDeflate** \ * **snappy** \ - * **lz4** + * **lz4** \ + * **tar** \ + * **tarGZip** */ -export type DelimitedTextCompressionCodec = string; +export type CompressionCodec = string; /** Known values of {@link OrcCompressionCodec} that the service accepts. */ export const enum KnownOrcCompressionCodec { None = "none", Zlib = "zlib", - Snappy = "snappy" + Snappy = "snappy", + Lzo = "lzo" } /** @@ -9178,7 +10098,8 @@ export const enum KnownOrcCompressionCodec { * ### Know values supported by the service * **none** \ * **zlib** \ - * **snappy** + * **snappy** \ + * **lzo** */ export type OrcCompressionCodec = string; @@ -9972,7 +10893,8 @@ export type HDInsightActivityDebugInfoOption = string; export const enum KnownSsisPackageLocationType { Ssisdb = "SSISDB", File = "File", - InlinePackage = "InlinePackage" + InlinePackage = "InlinePackage", + PackageStore = "PackageStore" } /** @@ -9982,7 +10904,8 @@ export const enum KnownSsisPackageLocationType { * ### Know values supported by the service * **SSISDB** \ * **File** \ - * **InlinePackage** + * **InlinePackage** \ + * **PackageStore** */ export type SsisPackageLocationType = string; @@ -10123,7 +11046,8 @@ export type BlobEventType = string; /** Known values of {@link TumblingWindowFrequency} that the service accepts. 
*/ export const enum KnownTumblingWindowFrequency { Minute = "Minute", - Hour = "Hour" + Hour = "Hour", + Month = "Month" } /** @@ -10132,7 +11056,8 @@ export const enum KnownTumblingWindowFrequency { * this enum contains the known values that the service supports. * ### Know values supported by the service * **Minute** \ - * **Hour** + * **Hour** \ + * **Month** */ export type TumblingWindowFrequency = string; @@ -10250,6 +11175,64 @@ export const enum KnownIntegrationRuntimeEdition { */ export type IntegrationRuntimeEdition = string; +/** Known values of {@link DatasetCompressionLevel} that the service accepts. */ +export const enum KnownDatasetCompressionLevel { + Optimal = "Optimal", + Fastest = "Fastest" +} + +/** + * Defines values for DatasetCompressionLevel. \ + * {@link KnownDatasetCompressionLevel} can be used interchangeably with DatasetCompressionLevel, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Optimal** \ + * **Fastest** + */ +export type DatasetCompressionLevel = string; + +/** Known values of {@link AvroCompressionCodec} that the service accepts. */ +export const enum KnownAvroCompressionCodec { + None = "none", + Deflate = "deflate", + Snappy = "snappy", + Xz = "xz", + Bzip2 = "bzip2" +} + +/** + * Defines values for AvroCompressionCodec. \ + * {@link KnownAvroCompressionCodec} can be used interchangeably with AvroCompressionCodec, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **none** \ + * **deflate** \ + * **snappy** \ + * **xz** \ + * **bzip2** + */ +export type AvroCompressionCodec = string; + +/** Known values of {@link ParquetCompressionCodecEnum} that the service accepts. */ +export const enum KnownParquetCompressionCodecEnum { + None = "none", + Gzip = "gzip", + Snappy = "snappy", + Lzo = "lzo" +} + +/** + * Defines values for ParquetCompressionCodecEnum. 
\ + * {@link KnownParquetCompressionCodecEnum} can be used interchangeably with ParquetCompressionCodecEnum, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **none** \ + * **gzip** \ + * **snappy** \ + * **lzo** + */ +export type ParquetCompressionCodecEnum = string; + /** Known values of {@link CopyBehaviorType} that the service accepts. */ export const enum KnownCopyBehaviorType { PreserveHierarchy = "PreserveHierarchy", @@ -10267,6 +11250,24 @@ export const enum KnownCopyBehaviorType { * **MergeFiles** */ export type CopyBehaviorType = string; + +/** Known values of {@link SqlPartitionOption} that the service accepts. */ +export const enum KnownSqlPartitionOption { + None = "None", + PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable", + DynamicRange = "DynamicRange" +} + +/** + * Defines values for SqlPartitionOption. \ + * {@link KnownSqlPartitionOption} can be used interchangeably with SqlPartitionOption, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **None** \ + * **PhysicalPartitionsOfTable** \ + * **DynamicRange** + */ +export type SqlPartitionOption = string; /** Defines values for ResourceIdentityType. */ export type ResourceIdentityType = "None" | "SystemAssigned"; /** Defines values for DayOfWeek. */ @@ -10313,7 +11314,8 @@ export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceReso }; /** Optional parameters. */ -export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions { +export interface LinkedServiceGetLinkedServiceOptionalParams + extends coreHttp.OperationOptions { /** ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
*/ ifNoneMatch?: string; } @@ -10355,7 +11357,8 @@ export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { }; /** Optional parameters. */ -export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { +export interface DatasetCreateOrUpdateDatasetOptionalParams + extends coreHttp.OperationOptions { /** ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -10375,7 +11378,8 @@ export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { }; /** Optional parameters. */ -export interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions { +export interface DatasetGetDatasetOptionalParams + extends coreHttp.OperationOptions { /** ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -10417,7 +11421,8 @@ export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & { }; /** Optional parameters. */ -export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions { +export interface PipelineCreateOrUpdatePipelineOptionalParams + extends coreHttp.OperationOptions { /** ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -10437,7 +11442,8 @@ export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & { }; /** Optional parameters. */ -export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions { +export interface PipelineGetPipelineOptionalParams + extends coreHttp.OperationOptions { /** ETag of the pipeline entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -10455,7 +11461,8 @@ export type PipelineGetPipelineResponse = PipelineResource & { }; /** Optional parameters. */ -export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions { +export interface PipelineCreatePipelineRunOptionalParams + extends coreHttp.OperationOptions { /** Parameters of the pipeline run. These parameters will be used only if the runId is not specified. */ parameters?: { [propertyName: string]: any }; /** The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run. */ @@ -10527,7 +11534,8 @@ export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { }; /** Optional parameters. */ -export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions { +export interface PipelineRunCancelPipelineRunOptionalParams + extends coreHttp.OperationOptions { /** If true, cancel all the Child pipelines that are triggered by the current pipeline. */ isRecursive?: boolean; } @@ -10545,7 +11553,8 @@ export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & { }; /** Optional parameters. */ -export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions { +export interface TriggerCreateOrUpdateTriggerOptionalParams + extends coreHttp.OperationOptions { /** ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -10565,7 +11574,8 @@ export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & { }; /** Optional parameters. */ -export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions { +export interface TriggerGetTriggerOptionalParams + extends coreHttp.OperationOptions { /** ETag of the trigger entity. 
Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -10647,7 +11657,8 @@ export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResp }; /** Optional parameters. */ -export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions { +export interface DataFlowCreateOrUpdateDataFlowOptionalParams + extends coreHttp.OperationOptions { /** ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -10667,7 +11678,8 @@ export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { }; /** Optional parameters. */ -export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions { +export interface DataFlowGetDataFlowOptionalParams + extends coreHttp.OperationOptions { /** ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -10785,7 +11797,8 @@ export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & }; /** Optional parameters. */ -export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions { +export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams + extends coreHttp.OperationOptions { /** ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -10805,7 +11818,8 @@ export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & { }; /** Optional parameters. 
*/ -export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions { +export interface SqlScriptGetSqlScriptOptionalParams + extends coreHttp.OperationOptions { /** ETag of the sql compute entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -10951,7 +11965,8 @@ export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse }; /** Optional parameters. */ -export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions { +export interface NotebookCreateOrUpdateNotebookOptionalParams + extends coreHttp.OperationOptions { /** ETag of the Note book entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -10971,7 +11986,8 @@ export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { }; /** Optional parameters. */ -export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions { +export interface NotebookGetNotebookOptionalParams + extends coreHttp.OperationOptions { /** ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -11170,7 +12186,8 @@ export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAcces }; /** Optional parameters. */ -export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { +export interface ArtifactsClientOptionalParams + extends coreHttp.ServiceClientOptions { /** Api Version */ apiVersion?: string; /** Overrides client endpoint. 
*/ diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts index 14f6a0307770..40d244b6eaf3 100644 --- a/sdk/synapse/synapse-artifacts/src/models/mappers.ts +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -4806,6 +4806,29 @@ export const FormatReadSettings: coreHttp.CompositeMapper = { } }; +export const CompressionReadSettings: coreHttp.CompositeMapper = { + serializedName: "CompressionReadSettings", + type: { + name: "Composite", + className: "CompressionReadSettings", + uberParent: "CompressionReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + export const FormatWriteSettings: coreHttp.CompositeMapper = { serializedName: "FormatWriteSettings", type: { @@ -4829,6 +4852,27 @@ export const FormatWriteSettings: coreHttp.CompositeMapper = { } }; +export const AdditionalColumns: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "AdditionalColumns", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "any" + } + }, + value: { + serializedName: "value", + type: { + name: "any" + } + } + } + } +}; + export const CopySource: coreHttp.CompositeMapper = { serializedName: "CopySource", type: { @@ -4975,6 +5019,134 @@ export const RedirectIncompatibleRowSettings: coreHttp.CompositeMapper = { } }; +export const LogStorageSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LogStorageSettings", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + linkedServiceName: { + serializedName: "linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + path: { + serializedName: "path", + type: { + name: "any" + } + }, + logLevel: { + serializedName: 
"logLevel", + type: { + name: "any" + } + }, + enableReliableLogging: { + serializedName: "enableReliableLogging", + type: { + name: "any" + } + } + } + } +}; + +export const LogSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LogSettings", + modelProperties: { + enableCopyActivityLog: { + serializedName: "enableCopyActivityLog", + type: { + name: "any" + } + }, + copyActivityLogSettings: { + serializedName: "copyActivityLogSettings", + type: { + name: "Composite", + className: "CopyActivityLogSettings" + } + }, + logLocationSettings: { + serializedName: "logLocationSettings", + type: { + name: "Composite", + className: "LogLocationSettings" + } + } + } + } +}; + +export const CopyActivityLogSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CopyActivityLogSettings", + modelProperties: { + logLevel: { + serializedName: "logLevel", + type: { + name: "any" + } + }, + enableReliableLogging: { + serializedName: "enableReliableLogging", + type: { + name: "any" + } + } + } + } +}; + +export const LogLocationSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LogLocationSettings", + modelProperties: { + linkedServiceName: { + serializedName: "linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + path: { + serializedName: "path", + type: { + name: "any" + } + } + } + } +}; + +export const SkipErrorFile: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SkipErrorFile", + modelProperties: { + fileMissing: { + serializedName: "fileMissing", + type: { + name: "any" + } + }, + dataInconsistency: { + serializedName: "dataInconsistency", + type: { + name: "any" + } + } + } + } +}; + export const SapHanaPartitionSettings: coreHttp.CompositeMapper = { type: { name: "Composite", @@ -5044,6 +5216,33 @@ export const StoredProcedureParameter: coreHttp.CompositeMapper = { } }; +export const SqlPartitionSettings: 
coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlPartitionSettings", + modelProperties: { + partitionColumnName: { + serializedName: "partitionColumnName", + type: { + name: "any" + } + }, + partitionUpperBound: { + serializedName: "partitionUpperBound", + type: { + name: "any" + } + }, + partitionLowerBound: { + serializedName: "partitionLowerBound", + type: { + name: "any" + } + } + } + } +}; + export const OraclePartitionSettings: coreHttp.CompositeMapper = { type: { name: "Composite", @@ -5188,6 +5387,52 @@ export const RedshiftUnloadSettings: coreHttp.CompositeMapper = { } }; +export const ExportSettings: coreHttp.CompositeMapper = { + serializedName: "ExportSettings", + type: { + name: "Composite", + className: "ExportSettings", + uberParent: "ExportSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const ImportSettings: coreHttp.CompositeMapper = { + serializedName: "ImportSettings", + type: { + name: "Composite", + className: "ImportSettings", + uberParent: "ImportSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + export const PolybaseSettings: coreHttp.CompositeMapper = { type: { name: "Composite", @@ -5271,29 +5516,6 @@ export const DWCopyCommandDefaultValue: coreHttp.CompositeMapper = { } }; -export const LogStorageSettings: coreHttp.CompositeMapper = { - type: { - name: "Composite", - className: "LogStorageSettings", - additionalProperties: { type: { name: "Object" } }, - modelProperties: { - linkedServiceName: { - serializedName: "linkedServiceName", - type: { - name: "Composite", - 
className: "LinkedServiceReference" - } - }, - path: { - serializedName: "path", - type: { - name: "any" - } - } - } - } -}; - export const CopyTranslator: coreHttp.CompositeMapper = { serializedName: "CopyTranslator", type: { @@ -5317,6 +5539,51 @@ export const CopyTranslator: coreHttp.CompositeMapper = { } }; +export const TypeConversionSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TypeConversionSettings", + modelProperties: { + allowDataTruncation: { + serializedName: "allowDataTruncation", + type: { + name: "any" + } + }, + treatBooleanAsNumber: { + serializedName: "treatBooleanAsNumber", + type: { + name: "any" + } + }, + dateTimeFormat: { + serializedName: "dateTimeFormat", + type: { + name: "any" + } + }, + dateTimeOffsetFormat: { + serializedName: "dateTimeOffsetFormat", + type: { + name: "any" + } + }, + timeSpanFormat: { + serializedName: "timeSpanFormat", + type: { + name: "any" + } + }, + culture: { + serializedName: "culture", + type: { + name: "any" + } + } + } + } +}; + export const SsisPackageLocation: coreHttp.CompositeMapper = { type: { name: "Composite", @@ -5354,7 +5621,14 @@ export const SsisPackageLocation: coreHttp.CompositeMapper = { name: "any" } }, - packageName: { + configurationAccessCredential: { + serializedName: "typeProperties.configurationAccessCredential", + type: { + name: "Composite", + className: "SsisAccessCredential" + } + }, + packageName: { serializedName: "typeProperties.packageName", type: { name: "String" @@ -6230,6 +6504,30 @@ export const CustomSetupBase: coreHttp.CompositeMapper = { } }; +export const ManagedVirtualNetworkReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedVirtualNetworkReference", + modelProperties: { + type: { + defaultValue: "ManagedVirtualNetworkReference", + isConstant: true, + serializedName: "type", + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + 
name: "String" + } + } + } + } +}; + export const LinkedIntegrationRuntimeType: coreHttp.CompositeMapper = { type: { name: "Composite", @@ -6358,6 +6656,12 @@ export const AzureBlobStorageLinkedService: coreHttp.CompositeMapper = { name: "any" } }, + azureCloudType: { + serializedName: "typeProperties.azureCloudType", + type: { + name: "any" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -6457,6 +6761,12 @@ export const AzureSqlDWLinkedService: coreHttp.CompositeMapper = { name: "any" } }, + azureCloudType: { + serializedName: "typeProperties.azureCloudType", + type: { + name: "any" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -6550,6 +6860,12 @@ export const AzureSqlDatabaseLinkedService: coreHttp.CompositeMapper = { name: "any" } }, + azureCloudType: { + serializedName: "typeProperties.azureCloudType", + type: { + name: "any" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -6603,6 +6919,12 @@ export const AzureSqlMILinkedService: coreHttp.CompositeMapper = { name: "any" } }, + azureCloudType: { + serializedName: "typeProperties.azureCloudType", + type: { + name: "any" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -6754,25 +7076,25 @@ export const DynamicsLinkedService: coreHttp.CompositeMapper = { hostName: { serializedName: "typeProperties.hostName", type: { - name: "String" + name: "any" } }, port: { serializedName: "typeProperties.port", type: { - name: "String" + name: "any" } }, serviceUri: { serializedName: "typeProperties.serviceUri", type: { - name: "String" + name: "any" } }, organizationName: { serializedName: "typeProperties.organizationName", type: { - name: "String" + name: "any" } }, authenticationType: { @@ -6894,7 +7216,7 @@ export const DynamicsCrmLinkedService: coreHttp.CompositeMapper = { servicePrincipalCredentialType: { serializedName: 
"typeProperties.servicePrincipalCredentialType", type: { - name: "String" + name: "any" } }, servicePrincipalCredential: { @@ -6984,7 +7306,7 @@ export const CommonDataServiceForAppsLinkedService: coreHttp.CompositeMapper = { servicePrincipalCredentialType: { serializedName: "typeProperties.servicePrincipalCredentialType", type: { - name: "String" + name: "any" } }, servicePrincipalCredential: { @@ -7140,6 +7462,44 @@ export const AzureFileStorageLinkedService: coreHttp.CompositeMapper = { className: "SecretBase" } }, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + sasUri: { + serializedName: "typeProperties.sasUri", + type: { + name: "any" + } + }, + sasToken: { + serializedName: "typeProperties.sasToken", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + fileShare: { + serializedName: "typeProperties.fileShare", + type: { + name: "any" + } + }, + snapshot: { + serializedName: "typeProperties.snapshot", + type: { + name: "any" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -7394,6 +7754,12 @@ export const Db2LinkedService: coreHttp.CompositeMapper = { polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, server: { serializedName: "typeProperties.server", required: true, @@ -7872,6 +8238,12 @@ export const ODataLinkedService: coreHttp.CompositeMapper = { name: "any" } }, + azureCloudType: { + serializedName: "typeProperties.azureCloudType", + type: { + name: "any" + } + }, aadResourceId: { serializedName: "typeProperties.aadResourceId", type: { @@ -8065,6 +8437,34 @@ export const 
MongoDbLinkedService: coreHttp.CompositeMapper = { } }; +export const MongoDbAtlasLinkedService: coreHttp.CompositeMapper = { + serializedName: "MongoDbAtlas", + type: { + name: "Composite", + className: "MongoDbAtlasLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + } + } + } +}; + export const MongoDbV2LinkedService: coreHttp.CompositeMapper = { serializedName: "MongoDbV2", type: { @@ -8157,6 +8557,12 @@ export const AzureDataLakeStoreLinkedService: coreHttp.CompositeMapper = { name: "any" } }, + azureCloudType: { + serializedName: "typeProperties.azureCloudType", + type: { + name: "any" + } + }, accountName: { serializedName: "typeProperties.accountName", type: { @@ -8227,6 +8633,12 @@ export const AzureBlobFSLinkedService: coreHttp.CompositeMapper = { name: "any" } }, + azureCloudType: { + serializedName: "typeProperties.azureCloudType", + type: { + name: "any" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -8321,6 +8733,12 @@ export const SalesforceLinkedService: coreHttp.CompositeMapper = { className: "SecretBase" } }, + apiVersion: { + serializedName: "typeProperties.apiVersion", + type: { + name: "any" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -8367,6 +8785,12 @@ export const SalesforceServiceCloudLinkedService: coreHttp.CompositeMapper = { className: "SecretBase" } }, + apiVersion: { + serializedName: "typeProperties.apiVersion", + type: { + name: "any" + } + }, extendedProperties: { serializedName: "typeProperties.extendedProperties", type: { 
@@ -8500,6 +8924,12 @@ export const SapOpenHubLinkedService: coreHttp.CompositeMapper = { name: "any" } }, + systemId: { + serializedName: "typeProperties.systemId", + type: { + name: "any" + } + }, userName: { serializedName: "typeProperties.userName", type: { @@ -8513,6 +8943,24 @@ export const SapOpenHubLinkedService: coreHttp.CompositeMapper = { className: "SecretBase" } }, + messageServer: { + serializedName: "typeProperties.messageServer", + type: { + name: "any" + } + }, + messageServerService: { + serializedName: "typeProperties.messageServerService", + type: { + name: "any" + } + }, + logonGroup: { + serializedName: "typeProperties.logonGroup", + type: { + name: "any" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -8585,6 +9033,12 @@ export const RestServiceLinkedService: coreHttp.CompositeMapper = { name: "any" } }, + azureCloudType: { + serializedName: "typeProperties.azureCloudType", + type: { + name: "any" + } + }, aadResourceId: { serializedName: "typeProperties.aadResourceId", type: { @@ -8611,6 +9065,12 @@ export const AmazonS3LinkedService: coreHttp.CompositeMapper = { polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "any" + } + }, accessKeyId: { serializedName: "typeProperties.accessKeyId", type: { @@ -8630,6 +9090,13 @@ export const AmazonS3LinkedService: coreHttp.CompositeMapper = { name: "any" } }, + sessionToken: { + serializedName: "typeProperties.sessionToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -9189,6 +9656,12 @@ export const ConcurLinkedService: coreHttp.CompositeMapper = { polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { 
...LinkedService.type.modelProperties, + connectionProperties: { + serializedName: "typeProperties.connectionProperties", + type: { + name: "any" + } + }, clientId: { serializedName: "typeProperties.clientId", required: true, @@ -10340,6 +10813,12 @@ export const QuickBooksLinkedService: coreHttp.CompositeMapper = { polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, + connectionProperties: { + serializedName: "typeProperties.connectionProperties", + type: { + name: "any" + } + }, endpoint: { serializedName: "typeProperties.endpoint", required: true, @@ -10640,6 +11119,12 @@ export const SquareLinkedService: coreHttp.CompositeMapper = { polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, + connectionProperties: { + serializedName: "typeProperties.connectionProperties", + type: { + name: "any" + } + }, host: { serializedName: "typeProperties.host", required: true, @@ -10706,6 +11191,12 @@ export const XeroLinkedService: coreHttp.CompositeMapper = { polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, + connectionProperties: { + serializedName: "typeProperties.connectionProperties", + type: { + name: "any" + } + }, host: { serializedName: "typeProperties.host", required: true, @@ -10765,6 +11256,12 @@ export const ZohoLinkedService: coreHttp.CompositeMapper = { polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, + connectionProperties: { + serializedName: "typeProperties.connectionProperties", + type: { + name: "any" + } + }, endpoint: { serializedName: "typeProperties.endpoint", required: true, @@ -10883,6 +11380,12 @@ export const SalesforceMarketingCloudLinkedService: coreHttp.CompositeMapper = { polymorphicDiscriminator: 
LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, + connectionProperties: { + serializedName: "typeProperties.connectionProperties", + type: { + name: "any" + } + }, clientId: { serializedName: "typeProperties.clientId", required: true, @@ -11249,6 +11752,18 @@ export const AzureDatabricksLinkedService: coreHttp.CompositeMapper = { className: "SecretBase" } }, + authentication: { + serializedName: "typeProperties.authentication", + type: { + name: "any" + } + }, + workspaceResourceId: { + serializedName: "typeProperties.workspaceResourceId", + type: { + name: "any" + } + }, existingClusterId: { serializedName: "typeProperties.existingClusterId", type: { @@ -11300,6 +11815,12 @@ export const AzureDatabricksLinkedService: coreHttp.CompositeMapper = { value: { type: { name: "any" } } } }, + newClusterLogDestination: { + serializedName: "typeProperties.newClusterLogDestination", + type: { + name: "any" + } + }, newClusterDriverNodeType: { serializedName: "typeProperties.newClusterDriverNodeType", type: { @@ -11318,6 +11839,52 @@ export const AzureDatabricksLinkedService: coreHttp.CompositeMapper = { name: "any" } }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + policyId: { + serializedName: "typeProperties.policyId", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDatabricksDeltaLakeLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureDatabricksDeltaLake", + type: { + name: "Composite", + className: "AzureDatabricksDeltaLakeLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + domain: { + serializedName: "typeProperties.domain", + required: true, + type: { + name: "any" + } + }, + accessToken: { + serializedName: 
"typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clusterId: { + serializedName: "typeProperties.clusterId", + type: { + name: "any" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -11780,6 +12347,88 @@ export const AzureFunctionLinkedService: coreHttp.CompositeMapper = { } }; +export const SnowflakeLinkedService: coreHttp.CompositeMapper = { + serializedName: "Snowflake", + type: { + name: "Composite", + className: "SnowflakeLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SharePointOnlineListLinkedService: coreHttp.CompositeMapper = { + serializedName: "SharePointOnlineList", + type: { + name: "Composite", + className: "SharePointOnlineListLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + siteUrl: { + serializedName: "typeProperties.siteUrl", + required: true, + type: { + name: "any" + } + }, + tenantId: { + serializedName: "typeProperties.tenantId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: 
"typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + export const AzureEntityResource: coreHttp.CompositeMapper = { type: { name: "Composite", @@ -11831,6 +12480,71 @@ export const TrackedResource: coreHttp.CompositeMapper = { } }; +export const AmazonS3Dataset: coreHttp.CompositeMapper = { + serializedName: "AmazonS3Object", + type: { + name: "Composite", + className: "AmazonS3Dataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + bucketName: { + serializedName: "typeProperties.bucketName", + required: true, + type: { + name: "any" + } + }, + key: { + serializedName: "typeProperties.key", + type: { + name: "any" + } + }, + prefix: { + serializedName: "typeProperties.prefix", + type: { + name: "any" + } + }, + version: { + serializedName: "typeProperties.version", + type: { + name: "any" + } + }, + modifiedDatetimeStart: { + serializedName: "typeProperties.modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "typeProperties.modifiedDatetimeEnd", + type: { + name: "any" + } + }, + format: { + serializedName: "typeProperties.format", + type: { + name: "Composite", + className: "DatasetStorageFormat" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + } + } + } +}; + export const AvroDataset: coreHttp.CompositeMapper = { serializedName: "Avro", type: { @@ -11851,7 +12565,7 @@ export const AvroDataset: coreHttp.CompositeMapper = { avroCompressionCodec: { serializedName: "typeProperties.avroCompressionCodec", type: { - name: "String" + name: "any" } }, avroCompressionLevel: { @@ -11868,6 +12582,58 @@ 
export const AvroDataset: coreHttp.CompositeMapper = { } }; +export const ExcelDataset: coreHttp.CompositeMapper = { + serializedName: "Excel", + type: { + name: "Composite", + className: "ExcelDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + sheetName: { + serializedName: "typeProperties.sheetName", + type: { + name: "any" + } + }, + range: { + serializedName: "typeProperties.range", + type: { + name: "any" + } + }, + firstRowAsHeader: { + serializedName: "typeProperties.firstRowAsHeader", + type: { + name: "any" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + }, + nullValue: { + serializedName: "typeProperties.nullValue", + type: { + name: "any" + } + } + } + } +}; + export const ParquetDataset: coreHttp.CompositeMapper = { serializedName: "Parquet", type: { @@ -11888,7 +12654,7 @@ export const ParquetDataset: coreHttp.CompositeMapper = { compressionCodec: { serializedName: "typeProperties.compressionCodec", type: { - name: "String" + name: "any" } } } @@ -11939,7 +12705,7 @@ export const DelimitedTextDataset: coreHttp.CompositeMapper = { compressionLevel: { serializedName: "typeProperties.compressionLevel", type: { - name: "String" + name: "any" } }, quoteChar: { @@ -12004,6 +12770,46 @@ export const JsonDataset: coreHttp.CompositeMapper = { } }; +export const XmlDataset: coreHttp.CompositeMapper = { + serializedName: "Xml", + type: { + name: "Composite", + className: "XmlDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + 
...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + encodingName: { + serializedName: "typeProperties.encodingName", + type: { + name: "any" + } + }, + nullValue: { + serializedName: "typeProperties.nullValue", + type: { + name: "any" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + } + } + } +}; + export const OrcDataset: coreHttp.CompositeMapper = { serializedName: "Orc", type: { @@ -12059,62 +12865,120 @@ export const BinaryDataset: coreHttp.CompositeMapper = { } }; -export const AzureTableDataset: coreHttp.CompositeMapper = { - serializedName: "AzureTable", +export const AzureBlobDataset: coreHttp.CompositeMapper = { + serializedName: "AzureBlob", type: { name: "Composite", - className: "AzureTableDataset", + className: "AzureBlobDataset", uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", - required: true, + folderPath: { + serializedName: "typeProperties.folderPath", type: { name: "any" } - } - } - } -}; - -export const AzureSqlTableDataset: coreHttp.CompositeMapper = { - serializedName: "AzureSqlTable", - type: { - name: "Composite", - className: "AzureSqlTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + tableRootLocation: { + serializedName: "typeProperties.tableRootLocation", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + fileName: { + serializedName: 
"typeProperties.fileName", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + modifiedDatetimeStart: { + serializedName: "typeProperties.modifiedDatetimeStart", type: { name: "any" } - } - } - } -}; - -export const AzureSqlMITableDataset: coreHttp.CompositeMapper = { - serializedName: "AzureSqlMITable", - type: { + }, + modifiedDatetimeEnd: { + serializedName: "typeProperties.modifiedDatetimeEnd", + type: { + name: "any" + } + }, + format: { + serializedName: "typeProperties.format", + type: { + name: "Composite", + className: "DatasetStorageFormat" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + } + } + } +}; + +export const AzureTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureTable", + type: { + name: "Composite", + className: "AzureTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureSqlTable", + type: { + name: "Composite", + className: "AzureSqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlMITableDataset: coreHttp.CompositeMapper = { + serializedName: 
"AzureSqlMITable", + type: { name: "Composite", className: "AzureSqlMITableDataset", uberParent: "Dataset", @@ -12324,6 +13188,86 @@ export const CommonDataServiceForAppsEntityDataset: coreHttp.CompositeMapper = { } }; +export const AzureDataLakeStoreDataset: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreFile", + type: { + name: "Composite", + className: "AzureDataLakeStoreDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + folderPath: { + serializedName: "typeProperties.folderPath", + type: { + name: "any" + } + }, + fileName: { + serializedName: "typeProperties.fileName", + type: { + name: "any" + } + }, + format: { + serializedName: "typeProperties.format", + type: { + name: "Composite", + className: "DatasetStorageFormat" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + } + } + } +}; + +export const AzureBlobFSDataset: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSFile", + type: { + name: "Composite", + className: "AzureBlobFSDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + folderPath: { + serializedName: "typeProperties.folderPath", + type: { + name: "any" + } + }, + fileName: { + serializedName: "typeProperties.fileName", + type: { + name: "any" + } + }, + format: { + serializedName: "typeProperties.format", + type: { + name: "Composite", + className: "DatasetStorageFormat" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + } + } + } +}; + export const Office365Dataset: coreHttp.CompositeMapper = { serializedName: 
"Office365Table", type: { @@ -12351,6 +13295,64 @@ export const Office365Dataset: coreHttp.CompositeMapper = { } }; +export const FileShareDataset: coreHttp.CompositeMapper = { + serializedName: "FileShare", + type: { + name: "Composite", + className: "FileShareDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + folderPath: { + serializedName: "typeProperties.folderPath", + type: { + name: "any" + } + }, + fileName: { + serializedName: "typeProperties.fileName", + type: { + name: "any" + } + }, + modifiedDatetimeStart: { + serializedName: "typeProperties.modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "typeProperties.modifiedDatetimeEnd", + type: { + name: "any" + } + }, + format: { + serializedName: "typeProperties.format", + type: { + name: "Composite", + className: "DatasetStorageFormat" + } + }, + fileFilter: { + serializedName: "typeProperties.fileFilter", + type: { + name: "any" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + } + } + } +}; + export const MongoDbCollectionDataset: coreHttp.CompositeMapper = { serializedName: "MongoDbCollection", type: { @@ -12372,6 +13374,27 @@ export const MongoDbCollectionDataset: coreHttp.CompositeMapper = { } }; +export const MongoDbAtlasCollectionDataset: coreHttp.CompositeMapper = { + serializedName: "MongoDbAtlasCollection", + type: { + name: "Composite", + className: "MongoDbAtlasCollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collection: { + serializedName: "typeProperties.collection", + required: true, + type: { + name: "any" + } + } + } + 
} +}; + export const MongoDbV2CollectionDataset: coreHttp.CompositeMapper = { serializedName: "MongoDbV2Collection", type: { @@ -13034,6 +14057,58 @@ export const AzureSearchIndexDataset: coreHttp.CompositeMapper = { } }; +export const HttpDataset: coreHttp.CompositeMapper = { + serializedName: "HttpFile", + type: { + name: "Composite", + className: "HttpDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + relativeUrl: { + serializedName: "typeProperties.relativeUrl", + type: { + name: "any" + } + }, + requestMethod: { + serializedName: "typeProperties.requestMethod", + type: { + name: "any" + } + }, + requestBody: { + serializedName: "typeProperties.requestBody", + type: { + name: "any" + } + }, + additionalHeaders: { + serializedName: "typeProperties.additionalHeaders", + type: { + name: "any" + } + }, + format: { + serializedName: "typeProperties.format", + type: { + name: "Composite", + className: "DatasetStorageFormat" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + } + } + } +}; + export const AmazonMWSObjectDataset: coreHttp.CompositeMapper = { serializedName: "AmazonMWSObject", type: { @@ -13867,8 +14942,80 @@ export const GoogleAdWordsObjectDataset: coreHttp.CompositeMapper = { } }; -export const ControlActivity: coreHttp.CompositeMapper = { - serializedName: "Container", +export const SnowflakeDataset: coreHttp.CompositeMapper = { + serializedName: "SnowflakeTable", + type: { + name: "Composite", + className: "SnowflakeDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + 
type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const SharePointOnlineListResourceDataset: coreHttp.CompositeMapper = { + serializedName: "SharePointOnlineListResource", + type: { + name: "Composite", + className: "SharePointOnlineListResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + listName: { + serializedName: "typeProperties.listName", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDatabricksDeltaLakeDataset: coreHttp.CompositeMapper = { + serializedName: "AzureDatabricksDeltaLakeDataset", + type: { + name: "Composite", + className: "AzureDatabricksDeltaLakeDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + type: { + name: "any" + } + } + } + } +}; + +export const ControlActivity: coreHttp.CompositeMapper = { + serializedName: "Container", type: { name: "Composite", className: "ControlActivity", @@ -13964,6 +15111,7 @@ export const RerunTumblingWindowTrigger: coreHttp.CompositeMapper = { ...Trigger.type.modelProperties, parentTrigger: { serializedName: "typeProperties.parentTrigger", + required: true, type: { name: "any" } @@ -13982,12 +15130,12 @@ export const RerunTumblingWindowTrigger: coreHttp.CompositeMapper = { name: "DateTime" } }, - maxConcurrency: { + rerunConcurrency: { constraints: { InclusiveMaximum: 50, InclusiveMinimum: 1 }, - serializedName: "typeProperties.maxConcurrency", + serializedName: "typeProperties.rerunConcurrency", required: true, type: { name: "Number" @@ 
-14274,6 +15422,13 @@ export const ManagedIntegrationRuntime: coreHttp.CompositeMapper = { name: "String" } }, + managedVirtualNetwork: { + serializedName: "managedVirtualNetwork", + type: { + name: "Composite", + className: "ManagedVirtualNetworkReference" + } + }, computeProperties: { serializedName: "typeProperties.computeProperties", type: { @@ -14378,6 +15533,20 @@ export const DataFlowSource: coreHttp.CompositeMapper = { name: "Composite", className: "DatasetReference" } + }, + linkedService: { + serializedName: "linkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + schemaLinkedService: { + serializedName: "schemaLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } } } } @@ -14395,6 +15564,20 @@ export const DataFlowSink: coreHttp.CompositeMapper = { name: "Composite", className: "DatasetReference" } + }, + linkedService: { + serializedName: "linkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + schemaLinkedService: { + serializedName: "schemaLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } } } } @@ -14603,7 +15786,8 @@ export const TextFormat: coreHttp.CompositeMapper = { className: "TextFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, columnDelimiter: { @@ -14671,7 +15855,8 @@ export const JsonFormat: coreHttp.CompositeMapper = { className: "JsonFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { 
...DatasetStorageFormat.type.modelProperties, filePattern: { @@ -14715,7 +15900,8 @@ export const AvroFormat: coreHttp.CompositeMapper = { className: "AvroFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14729,7 +15915,8 @@ export const OrcFormat: coreHttp.CompositeMapper = { className: "OrcFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14743,7 +15930,8 @@ export const ParquetFormat: coreHttp.CompositeMapper = { className: "ParquetFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14777,7 +15965,7 @@ export const DatasetGZipCompression: coreHttp.CompositeMapper = { level: { serializedName: "level", type: { - name: "String" + name: "any" } } } @@ -14797,7 +15985,7 @@ export const DatasetDeflateCompression: coreHttp.CompositeMapper = { level: { serializedName: "level", type: { - name: "String" + name: "any" } } } @@ -14817,7 +16005,41 @@ export const DatasetZipDeflateCompression: coreHttp.CompositeMapper = { level: { serializedName: "level", type: { - name: "String" + name: "any" + } + } + } + } +}; + +export const DatasetTarCompression: coreHttp.CompositeMapper = { + serializedName: "Tar", + type: { + name: "Composite", + className: 
"DatasetTarCompression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetCompression.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetCompression.type.modelProperties + } + } +}; + +export const DatasetTarGZipCompression: coreHttp.CompositeMapper = { + serializedName: "TarGZip", + type: { + name: "Composite", + className: "DatasetTarGZipCompression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetCompression.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetCompression.type.modelProperties, + level: { + serializedName: "level", + type: { + name: "any" } } } @@ -14830,7 +16052,8 @@ export const WebAnonymousAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebAnonymousAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties } @@ -14843,7 +16066,8 @@ export const WebBasicAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebBasicAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, username: { @@ -14870,7 +16094,8 @@ export const WebClientCertificateAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebClientCertificateAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + 
polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, pfx: { @@ -14925,12 +16150,30 @@ export const AzureBlobStorageReadSettings: coreHttp.CompositeMapper = { name: "any" } }, + fileListPath: { + serializedName: "fileListPath", + type: { + name: "any" + } + }, enablePartitionDiscovery: { serializedName: "enablePartitionDiscovery", type: { name: "Boolean" } }, + partitionRootPath: { + serializedName: "partitionRootPath", + type: { + name: "any" + } + }, + deleteFilesAfterCompletion: { + serializedName: "deleteFilesAfterCompletion", + type: { + name: "any" + } + }, modifiedDatetimeStart: { serializedName: "modifiedDatetimeStart", type: { @@ -14975,12 +16218,30 @@ export const AzureBlobFSReadSettings: coreHttp.CompositeMapper = { name: "any" } }, + fileListPath: { + serializedName: "fileListPath", + type: { + name: "any" + } + }, enablePartitionDiscovery: { serializedName: "enablePartitionDiscovery", type: { name: "Boolean" } }, + partitionRootPath: { + serializedName: "partitionRootPath", + type: { + name: "any" + } + }, + deleteFilesAfterCompletion: { + serializedName: "deleteFilesAfterCompletion", + type: { + name: "any" + } + }, modifiedDatetimeStart: { serializedName: "modifiedDatetimeStart", type: { @@ -15025,12 +16286,42 @@ export const AzureDataLakeStoreReadSettings: coreHttp.CompositeMapper = { name: "any" } }, + fileListPath: { + serializedName: "fileListPath", + type: { + name: "any" + } + }, + listAfter: { + serializedName: "listAfter", + type: { + name: "any" + } + }, + listBefore: { + serializedName: "listBefore", + type: { + name: "any" + } + }, enablePartitionDiscovery: { serializedName: "enablePartitionDiscovery", type: { name: "Boolean" } }, + partitionRootPath: { + serializedName: "partitionRootPath", + type: { + name: "any" + } + }, + deleteFilesAfterCompletion: { + serializedName: "deleteFilesAfterCompletion", + type: { + name: "any" + } 
+ }, modifiedDatetimeStart: { serializedName: "modifiedDatetimeStart", type: { @@ -15081,12 +16372,30 @@ export const AmazonS3ReadSettings: coreHttp.CompositeMapper = { name: "any" } }, + fileListPath: { + serializedName: "fileListPath", + type: { + name: "any" + } + }, enablePartitionDiscovery: { serializedName: "enablePartitionDiscovery", type: { name: "Boolean" } }, + partitionRootPath: { + serializedName: "partitionRootPath", + type: { + name: "any" + } + }, + deleteFilesAfterCompletion: { + serializedName: "deleteFilesAfterCompletion", + type: { + name: "any" + } + }, modifiedDatetimeStart: { serializedName: "modifiedDatetimeStart", type: { @@ -15131,12 +16440,30 @@ export const FileServerReadSettings: coreHttp.CompositeMapper = { name: "any" } }, + fileListPath: { + serializedName: "fileListPath", + type: { + name: "any" + } + }, enablePartitionDiscovery: { serializedName: "enablePartitionDiscovery", type: { name: "Boolean" } }, + partitionRootPath: { + serializedName: "partitionRootPath", + type: { + name: "any" + } + }, + deleteFilesAfterCompletion: { + serializedName: "deleteFilesAfterCompletion", + type: { + name: "any" + } + }, modifiedDatetimeStart: { serializedName: "modifiedDatetimeStart", type: { @@ -15148,6 +16475,12 @@ export const FileServerReadSettings: coreHttp.CompositeMapper = { type: { name: "any" } + }, + fileFilter: { + serializedName: "fileFilter", + type: { + name: "any" + } } } } @@ -15181,12 +16514,36 @@ export const AzureFileStorageReadSettings: coreHttp.CompositeMapper = { name: "any" } }, + prefix: { + serializedName: "prefix", + type: { + name: "any" + } + }, + fileListPath: { + serializedName: "fileListPath", + type: { + name: "any" + } + }, enablePartitionDiscovery: { serializedName: "enablePartitionDiscovery", type: { name: "Boolean" } }, + partitionRootPath: { + serializedName: "partitionRootPath", + type: { + name: "any" + } + }, + deleteFilesAfterCompletion: { + serializedName: "deleteFilesAfterCompletion", + type: { + name: 
"any" + } + }, modifiedDatetimeStart: { serializedName: "modifiedDatetimeStart", type: { @@ -15237,12 +16594,30 @@ export const GoogleCloudStorageReadSettings: coreHttp.CompositeMapper = { name: "any" } }, + fileListPath: { + serializedName: "fileListPath", + type: { + name: "any" + } + }, enablePartitionDiscovery: { serializedName: "enablePartitionDiscovery", type: { name: "Boolean" } }, + partitionRootPath: { + serializedName: "partitionRootPath", + type: { + name: "any" + } + }, + deleteFilesAfterCompletion: { + serializedName: "deleteFilesAfterCompletion", + type: { + name: "any" + } + }, modifiedDatetimeStart: { serializedName: "modifiedDatetimeStart", type: { @@ -15287,6 +16662,30 @@ export const FtpReadSettings: coreHttp.CompositeMapper = { name: "any" } }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + partitionRootPath: { + serializedName: "partitionRootPath", + type: { + name: "any" + } + }, + deleteFilesAfterCompletion: { + serializedName: "deleteFilesAfterCompletion", + type: { + name: "any" + } + }, + fileListPath: { + serializedName: "fileListPath", + type: { + name: "any" + } + }, useBinaryTransfer: { serializedName: "useBinaryTransfer", type: { @@ -15325,6 +16724,30 @@ export const SftpReadSettings: coreHttp.CompositeMapper = { name: "any" } }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + partitionRootPath: { + serializedName: "partitionRootPath", + type: { + name: "any" + } + }, + fileListPath: { + serializedName: "fileListPath", + type: { + name: "any" + } + }, + deleteFilesAfterCompletion: { + serializedName: "deleteFilesAfterCompletion", + type: { + name: "any" + } + }, modifiedDatetimeStart: { serializedName: "modifiedDatetimeStart", type: { @@ -15374,6 +16797,18 @@ export const HttpReadSettings: coreHttp.CompositeMapper = { type: { name: "any" } + }, + enablePartitionDiscovery: { + serializedName: 
"enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + partitionRootPath: { + serializedName: "partitionRootPath", + type: { + name: "any" + } } } } @@ -15398,76 +16833,303 @@ export const HdfsReadSettings: coreHttp.CompositeMapper = { wildcardFolderPath: { serializedName: "wildcardFolderPath", type: { - name: "any" + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + fileListPath: { + serializedName: "fileListPath", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + partitionRootPath: { + serializedName: "partitionRootPath", + type: { + name: "any" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + }, + distcpSettings: { + serializedName: "distcpSettings", + type: { + name: "Composite", + className: "DistcpSettings" + } + }, + deleteFilesAfterCompletion: { + serializedName: "deleteFilesAfterCompletion", + type: { + name: "any" + } + } + } + } +}; + +export const SftpWriteSettings: coreHttp.CompositeMapper = { + serializedName: "SftpWriteSettings", + type: { + name: "Composite", + className: "SftpWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties, + operationTimeout: { + serializedName: "operationTimeout", + type: { + name: "any" + } + }, + useTempFileRename: { + serializedName: "useTempFileRename", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobStorageWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AzureBlobStorageWriteSettings", + type: { + name: "Composite", + className: "AzureBlobStorageWriteSettings", + 
uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties, + blockSizeInMB: { + serializedName: "blockSizeInMB", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSWriteSettings", + type: { + name: "Composite", + className: "AzureBlobFSWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties, + blockSizeInMB: { + serializedName: "blockSizeInMB", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreWriteSettings", + type: { + name: "Composite", + className: "AzureDataLakeStoreWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties, + expiryDateTime: { + serializedName: "expiryDateTime", + type: { + name: "any" + } + } + } + } +}; + +export const FileServerWriteSettings: coreHttp.CompositeMapper = { + serializedName: "FileServerWriteSettings", + type: { + name: "Composite", + className: "FileServerWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties + } + } +}; + +export const AzureFileStorageWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AzureFileStorageWriteSettings", + type: { + name: "Composite", + 
className: "AzureFileStorageWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties + } + } +}; + +export const DelimitedTextReadSettings: coreHttp.CompositeMapper = { + serializedName: "DelimitedTextReadSettings", + type: { + name: "Composite", + className: "DelimitedTextReadSettings", + uberParent: "FormatReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...FormatReadSettings.type.modelProperties, + skipLineCount: { + serializedName: "skipLineCount", + type: { + name: "any" + } + }, + compressionProperties: { + serializedName: "compressionProperties", + type: { + name: "Composite", + className: "CompressionReadSettings" + } + } + } + } +}; + +export const JsonReadSettings: coreHttp.CompositeMapper = { + serializedName: "JsonReadSettings", + type: { + name: "Composite", + className: "JsonReadSettings", + uberParent: "FormatReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...FormatReadSettings.type.modelProperties, + compressionProperties: { + serializedName: "compressionProperties", + type: { + name: "Composite", + className: "CompressionReadSettings" + } + } + } + } +}; + +export const XmlReadSettings: coreHttp.CompositeMapper = { + serializedName: "XmlReadSettings", + type: { + name: "Composite", + className: "XmlReadSettings", + uberParent: "FormatReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...FormatReadSettings.type.modelProperties, + compressionProperties: { + serializedName: "compressionProperties", + type: 
{ + name: "Composite", + className: "CompressionReadSettings" } }, - wildcardFileName: { - serializedName: "wildcardFileName", + validationMode: { + serializedName: "validationMode", type: { name: "any" } }, - enablePartitionDiscovery: { - serializedName: "enablePartitionDiscovery", - type: { - name: "Boolean" - } - }, - modifiedDatetimeStart: { - serializedName: "modifiedDatetimeStart", + detectDataType: { + serializedName: "detectDataType", type: { name: "any" } }, - modifiedDatetimeEnd: { - serializedName: "modifiedDatetimeEnd", + namespaces: { + serializedName: "namespaces", type: { name: "any" } }, - distcpSettings: { - serializedName: "distcpSettings", + namespacePrefixes: { + serializedName: "namespacePrefixes", type: { - name: "Composite", - className: "DistcpSettings" + name: "any" } } } } }; -export const SftpWriteSettings: coreHttp.CompositeMapper = { - serializedName: "SftpWriteSettings", +export const BinaryReadSettings: coreHttp.CompositeMapper = { + serializedName: "BinaryReadSettings", type: { name: "Composite", - className: "SftpWriteSettings", - uberParent: "StoreWriteSettings", + className: "BinaryReadSettings", + uberParent: "FormatReadSettings", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator, modelProperties: { - ...StoreWriteSettings.type.modelProperties, - operationTimeout: { - serializedName: "operationTimeout", + ...FormatReadSettings.type.modelProperties, + compressionProperties: { + serializedName: "compressionProperties", type: { - name: "any" + name: "Composite", + className: "CompressionReadSettings" } } } } }; -export const AzureBlobStorageWriteSettings: coreHttp.CompositeMapper = { - serializedName: "AzureBlobStorageWriteSettings", +export const ZipDeflateReadSettings: coreHttp.CompositeMapper = { + serializedName: "ZipDeflateReadSettings", type: { name: "Composite", - 
className: "AzureBlobStorageWriteSettings", - uberParent: "StoreWriteSettings", + className: "ZipDeflateReadSettings", + uberParent: "CompressionReadSettings", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + polymorphicDiscriminator: + CompressionReadSettings.type.polymorphicDiscriminator, modelProperties: { - ...StoreWriteSettings.type.modelProperties, - blockSizeInMB: { - serializedName: "blockSizeInMB", + ...CompressionReadSettings.type.modelProperties, + preserveZipFileNameAsFolder: { + serializedName: "preserveZipFileNameAsFolder", type: { name: "any" } @@ -15476,18 +17138,19 @@ export const AzureBlobStorageWriteSettings: coreHttp.CompositeMapper = { } }; -export const AzureBlobFSWriteSettings: coreHttp.CompositeMapper = { - serializedName: "AzureBlobFSWriteSettings", +export const TarReadSettings: coreHttp.CompositeMapper = { + serializedName: "TarReadSettings", type: { name: "Composite", - className: "AzureBlobFSWriteSettings", - uberParent: "StoreWriteSettings", + className: "TarReadSettings", + uberParent: "CompressionReadSettings", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + polymorphicDiscriminator: + CompressionReadSettings.type.polymorphicDiscriminator, modelProperties: { - ...StoreWriteSettings.type.modelProperties, - blockSizeInMB: { - serializedName: "blockSizeInMB", + ...CompressionReadSettings.type.modelProperties, + preserveCompressionFileNameAsFolder: { + serializedName: "preserveCompressionFileNameAsFolder", type: { name: "any" } @@ -15496,46 +17159,83 @@ export const AzureBlobFSWriteSettings: coreHttp.CompositeMapper = { } }; -export const AzureDataLakeStoreWriteSettings: coreHttp.CompositeMapper = { - serializedName: "AzureDataLakeStoreWriteSettings", +export const TarGZipReadSettings: coreHttp.CompositeMapper = { + serializedName: "TarGZipReadSettings", type: { name: 
"Composite", - className: "AzureDataLakeStoreWriteSettings", - uberParent: "StoreWriteSettings", + className: "TarGZipReadSettings", + uberParent: "CompressionReadSettings", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + polymorphicDiscriminator: + CompressionReadSettings.type.polymorphicDiscriminator, modelProperties: { - ...StoreWriteSettings.type.modelProperties + ...CompressionReadSettings.type.modelProperties, + preserveCompressionFileNameAsFolder: { + serializedName: "preserveCompressionFileNameAsFolder", + type: { + name: "any" + } + } } } }; -export const FileServerWriteSettings: coreHttp.CompositeMapper = { - serializedName: "FileServerWriteSettings", +export const AvroWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AvroWriteSettings", type: { name: "Composite", - className: "FileServerWriteSettings", - uberParent: "StoreWriteSettings", + className: "AvroWriteSettings", + uberParent: "FormatWriteSettings", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + polymorphicDiscriminator: FormatWriteSettings.type.polymorphicDiscriminator, modelProperties: { - ...StoreWriteSettings.type.modelProperties + ...FormatWriteSettings.type.modelProperties, + recordName: { + serializedName: "recordName", + type: { + name: "String" + } + }, + recordNamespace: { + serializedName: "recordNamespace", + type: { + name: "String" + } + }, + maxRowsPerFile: { + serializedName: "maxRowsPerFile", + type: { + name: "any" + } + }, + fileNamePrefix: { + serializedName: "fileNamePrefix", + type: { + name: "any" + } + } } } }; -export const DelimitedTextReadSettings: coreHttp.CompositeMapper = { - serializedName: "DelimitedTextReadSettings", +export const OrcWriteSettings: coreHttp.CompositeMapper = { + serializedName: "OrcWriteSettings", type: { name: "Composite", - className: "DelimitedTextReadSettings", - 
uberParent: "FormatReadSettings", + className: "OrcWriteSettings", + uberParent: "FormatWriteSettings", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator, + polymorphicDiscriminator: FormatWriteSettings.type.polymorphicDiscriminator, modelProperties: { - ...FormatReadSettings.type.modelProperties, - skipLineCount: { - serializedName: "skipLineCount", + ...FormatWriteSettings.type.modelProperties, + maxRowsPerFile: { + serializedName: "maxRowsPerFile", + type: { + name: "any" + } + }, + fileNamePrefix: { + serializedName: "fileNamePrefix", type: { name: "any" } @@ -15544,26 +17244,26 @@ export const DelimitedTextReadSettings: coreHttp.CompositeMapper = { } }; -export const AvroWriteSettings: coreHttp.CompositeMapper = { - serializedName: "AvroWriteSettings", +export const ParquetWriteSettings: coreHttp.CompositeMapper = { + serializedName: "ParquetWriteSettings", type: { name: "Composite", - className: "AvroWriteSettings", + className: "ParquetWriteSettings", uberParent: "FormatWriteSettings", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: FormatWriteSettings.type.polymorphicDiscriminator, modelProperties: { ...FormatWriteSettings.type.modelProperties, - recordName: { - serializedName: "recordName", + maxRowsPerFile: { + serializedName: "maxRowsPerFile", type: { - name: "String" + name: "any" } }, - recordNamespace: { - serializedName: "recordNamespace", + fileNamePrefix: { + serializedName: "fileNamePrefix", type: { - name: "String" + name: "any" } } } @@ -15592,6 +17292,18 @@ export const DelimitedTextWriteSettings: coreHttp.CompositeMapper = { type: { name: "any" } + }, + maxRowsPerFile: { + serializedName: "maxRowsPerFile", + type: { + name: "any" + } + }, + fileNamePrefix: { + serializedName: "fileNamePrefix", + type: { + name: "any" + } } } } @@ -15633,6 +17345,51 @@ export const AvroSource: coreHttp.CompositeMapper = { name: "Composite", className: 
"StoreReadSettings" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } + } + } + } +}; + +export const ExcelSource: coreHttp.CompositeMapper = { + serializedName: "ExcelSource", + type: { + name: "Composite", + className: "ExcelSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15654,6 +17411,18 @@ export const ParquetSource: coreHttp.CompositeMapper = { name: "Composite", className: "StoreReadSettings" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15682,6 +17451,18 @@ export const DelimitedTextSource: coreHttp.CompositeMapper = { name: "Composite", className: "DelimitedTextReadSettings" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15703,6 +17484,65 @@ export const JsonSource: coreHttp.CompositeMapper = { name: "Composite", className: "StoreReadSettings" } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "JsonReadSettings" + } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + 
} + } + } + } + } + } +}; + +export const XmlSource: coreHttp.CompositeMapper = { + serializedName: "XmlSource", + type: { + name: "Composite", + className: "XmlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "XmlReadSettings" + } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15724,6 +17564,18 @@ export const OrcSource: coreHttp.CompositeMapper = { name: "Composite", className: "StoreReadSettings" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15745,6 +17597,13 @@ export const BinarySource: coreHttp.CompositeMapper = { name: "Composite", className: "StoreReadSettings" } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "BinaryReadSettings" + } } } } @@ -15768,6 +17627,18 @@ export const TabularSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15832,6 +17703,18 @@ export const DocumentDbCollectionSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" 
+ } + } + } } } } @@ -15864,6 +17747,24 @@ export const CosmosDbSqlApiSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + detectDatetime: { + serializedName: "detectDatetime", + type: { + name: "any" + } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15884,6 +17785,18 @@ export const DynamicsSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15904,6 +17817,18 @@ export const DynamicsCrmSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15924,6 +17849,18 @@ export const CommonDataServiceForAppsSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15944,6 +17881,18 @@ export const RelationalSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15964,6 +17913,18 @@ export const MicrosoftAccessSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -15984,6 +17945,24 @@ export const ODataSource: coreHttp.CompositeMapper 
= { type: { name: "any" } + }, + httpRequestTimeout: { + serializedName: "httpRequestTimeout", + type: { + name: "any" + } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -16010,6 +17989,18 @@ export const SalesforceServiceCloudSource: coreHttp.CompositeMapper = { type: { name: "String" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -16060,6 +18051,18 @@ export const RestSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -16080,6 +18083,18 @@ export const FileSystemSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -16140,6 +18155,18 @@ export const AzureDataExplorerSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -16179,6 +18206,18 @@ export const OracleSource: coreHttp.CompositeMapper = { name: "Composite", className: "OraclePartitionSettings" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -16193,7 +18232,19 @@ export const WebSource: coreHttp.CompositeMapper = { additionalProperties: { type: { name: "Object" } }, 
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, modelProperties: { - ...CopySource.type.modelProperties + ...CopySource.type.modelProperties, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } + } } } }; @@ -16213,6 +18264,69 @@ export const MongoDbSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } + } + } + } +}; + +export const MongoDbAtlasSource: coreHttp.CompositeMapper = { + serializedName: "MongoDbAtlasSource", + type: { + name: "Composite", + className: "MongoDbAtlasSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + filter: { + serializedName: "filter", + type: { + name: "any" + } + }, + cursorMethods: { + serializedName: "cursorMethods", + type: { + name: "Composite", + className: "MongoDbCursorMethodsProperties" + } + }, + batchSize: { + serializedName: "batchSize", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -16252,6 +18366,18 @@ export const MongoDbV2Source: coreHttp.CompositeMapper = { type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -16291,6 +18417,18 @@ export const CosmosDbMongoDbApiSource: coreHttp.CompositeMapper = { 
type: { name: "any" } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "AdditionalColumns" + } + } + } } } } @@ -16330,14 +18468,66 @@ export const Office365Source: coreHttp.CompositeMapper = { name: "any" } }, - endTime: { - serializedName: "endTime", + endTime: { + serializedName: "endTime", + type: { + name: "any" + } + }, + outputColumns: { + serializedName: "outputColumns", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreSource: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreSource", + type: { + name: "Composite", + className: "AzureDataLakeStoreSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSSource: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSSource", + type: { + name: "Composite", + className: "AzureBlobFSSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + treatEmptyAsNull: { + serializedName: "treatEmptyAsNull", + type: { + name: "any" + } + }, + skipHeaderLineCount: { + serializedName: "skipHeaderLineCount", type: { name: "any" } }, - outputColumns: { - serializedName: "outputColumns", + recursive: { + serializedName: "recursive", type: { name: "any" } @@ -16346,18 +18536,18 @@ export const Office365Source: coreHttp.CompositeMapper = { } }; -export const AzureDataLakeStoreSource: coreHttp.CompositeMapper = { - serializedName: "AzureDataLakeStoreSource", +export const HttpSource: coreHttp.CompositeMapper = { + serializedName: "HttpSource", 
type: { name: "Composite", - className: "AzureDataLakeStoreSource", + className: "HttpSource", uberParent: "CopySource", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, modelProperties: { ...CopySource.type.modelProperties, - recursive: { - serializedName: "recursive", + httpRequestTimeout: { + serializedName: "httpRequestTimeout", type: { name: "any" } @@ -16366,48 +18556,76 @@ export const AzureDataLakeStoreSource: coreHttp.CompositeMapper = { } }; -export const AzureBlobFSSource: coreHttp.CompositeMapper = { - serializedName: "AzureBlobFSSource", +export const SnowflakeSource: coreHttp.CompositeMapper = { + serializedName: "SnowflakeSource", type: { name: "Composite", - className: "AzureBlobFSSource", + className: "SnowflakeSource", uberParent: "CopySource", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, modelProperties: { ...CopySource.type.modelProperties, - treatEmptyAsNull: { - serializedName: "treatEmptyAsNull", + query: { + serializedName: "query", type: { name: "any" } }, - skipHeaderLineCount: { - serializedName: "skipHeaderLineCount", + exportSettings: { + serializedName: "exportSettings", + type: { + name: "Composite", + className: "SnowflakeExportCopyCommand" + } + } + } + } +}; + +export const AzureDatabricksDeltaLakeSource: coreHttp.CompositeMapper = { + serializedName: "AzureDatabricksDeltaLakeSource", + type: { + name: "Composite", + className: "AzureDatabricksDeltaLakeSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", type: { name: "any" } }, - recursive: { - serializedName: "recursive", + exportSettings: { + serializedName: "exportSettings", type: { - name: "any" + name: "Composite", + className: 
"AzureDatabricksDeltaLakeExportCommand" } } } } }; -export const HttpSource: coreHttp.CompositeMapper = { - serializedName: "HttpSource", +export const SharePointOnlineListSource: coreHttp.CompositeMapper = { + serializedName: "SharePointOnlineListSource", type: { name: "Composite", - className: "HttpSource", + className: "SharePointOnlineListSource", uberParent: "CopySource", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, modelProperties: { ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, httpRequestTimeout: { serializedName: "httpRequestTimeout", type: { @@ -16490,6 +18708,57 @@ export const OrcSink: coreHttp.CompositeMapper = { name: "Composite", className: "StoreWriteSettings" } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "OrcWriteSettings" + } + } + } + } +}; + +export const RestSink: coreHttp.CompositeMapper = { + serializedName: "RestSink", + type: { + name: "Composite", + className: "RestSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + requestMethod: { + serializedName: "requestMethod", + type: { + name: "any" + } + }, + additionalHeaders: { + serializedName: "additionalHeaders", + type: { + name: "any" + } + }, + httpRequestTimeout: { + serializedName: "httpRequestTimeout", + type: { + name: "any" + } + }, + requestInterval: { + serializedName: "requestInterval", + type: { + name: "any" + } + }, + httpCompressionType: { + serializedName: "httpCompressionType", + type: { + name: "any" + } } } } @@ -16535,6 +18804,33 @@ export const AzureMySqlSink: coreHttp.CompositeMapper = { } }; +export const AzureDatabricksDeltaLakeSink: coreHttp.CompositeMapper = { + serializedName: "AzureDatabricksDeltaLakeSink", + 
type: { + name: "Composite", + className: "AzureDatabricksDeltaLakeSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + importSettings: { + serializedName: "importSettings", + type: { + name: "Composite", + className: "AzureDatabricksDeltaLakeImportCommand" + } + } + } + } +}; + export const SapCloudForCustomerSink: coreHttp.CompositeMapper = { serializedName: "SapCloudForCustomerSink", type: { @@ -16550,6 +18846,12 @@ export const SapCloudForCustomerSink: coreHttp.CompositeMapper = { type: { name: "String" } + }, + httpRequestTimeout: { + serializedName: "httpRequestTimeout", + type: { + name: "any" + } } } } @@ -16651,6 +18953,13 @@ export const ParquetSink: coreHttp.CompositeMapper = { name: "Composite", className: "StoreWriteSettings" } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "ParquetWriteSettings" + } } } } @@ -17045,6 +19354,33 @@ export const SqlDWSink: coreHttp.CompositeMapper = { } }; +export const SnowflakeSink: coreHttp.CompositeMapper = { + serializedName: "SnowflakeSink", + type: { + name: "Composite", + className: "SnowflakeSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + importSettings: { + serializedName: "importSettings", + type: { + name: "Composite", + className: "SnowflakeImportCopyCommand" + } + } + } + } +}; + export const OracleSink: coreHttp.CompositeMapper = { serializedName: "OracleSink", type: { @@ -17309,43 +19645,149 @@ export const AzureDataExplorerSink: coreHttp.CompositeMapper = 
{ ingestionMappingAsJson: { serializedName: "ingestionMappingAsJson", type: { - name: "any" + name: "any" + } + }, + flushImmediately: { + serializedName: "flushImmediately", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceSink: coreHttp.CompositeMapper = { + serializedName: "SalesforceSink", + type: { + name: "Composite", + className: "SalesforceSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "String" + } + }, + externalIdFieldName: { + serializedName: "externalIdFieldName", + type: { + name: "any" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceServiceCloudSink: coreHttp.CompositeMapper = { + serializedName: "SalesforceServiceCloudSink", + type: { + name: "Composite", + className: "SalesforceServiceCloudSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "String" + } + }, + externalIdFieldName: { + serializedName: "externalIdFieldName", + type: { + name: "any" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbMongoDbApiSink: coreHttp.CompositeMapper = { + serializedName: "CosmosDbMongoDbApiSink", + type: { + name: "Composite", + className: "CosmosDbMongoDbApiSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: 
"writeBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const SnowflakeExportCopyCommand: coreHttp.CompositeMapper = { + serializedName: "SnowflakeExportCopyCommand", + type: { + name: "Composite", + className: "SnowflakeExportCopyCommand", + uberParent: "ExportSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: ExportSettings.type.polymorphicDiscriminator, + modelProperties: { + ...ExportSettings.type.modelProperties, + additionalCopyOptions: { + serializedName: "additionalCopyOptions", + type: { + name: "Dictionary", + value: { type: { name: "any" } } } }, - flushImmediately: { - serializedName: "flushImmediately", + additionalFormatOptions: { + serializedName: "additionalFormatOptions", type: { - name: "any" + name: "Dictionary", + value: { type: { name: "any" } } } } } } }; -export const SalesforceSink: coreHttp.CompositeMapper = { - serializedName: "SalesforceSink", +export const AzureDatabricksDeltaLakeExportCommand: coreHttp.CompositeMapper = { + serializedName: "AzureDatabricksDeltaLakeExportCommand", type: { name: "Composite", - className: "SalesforceSink", - uberParent: "CopySink", + className: "AzureDatabricksDeltaLakeExportCommand", + uberParent: "ExportSettings", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + polymorphicDiscriminator: ExportSettings.type.polymorphicDiscriminator, modelProperties: { - ...CopySink.type.modelProperties, - writeBehavior: { - serializedName: "writeBehavior", - type: { - name: "String" - } - }, - externalIdFieldName: { - serializedName: "externalIdFieldName", + ...ExportSettings.type.modelProperties, + dateFormat: { + serializedName: "dateFormat", type: { name: "any" } }, - ignoreNullValues: { - serializedName: "ignoreNullValues", + timestampFormat: { + serializedName: "timestampFormat", type: { name: "any" } @@ -17354,30 +19796,24 @@ export const SalesforceSink: coreHttp.CompositeMapper = { } }; 
-export const SalesforceServiceCloudSink: coreHttp.CompositeMapper = { - serializedName: "SalesforceServiceCloudSink", +export const AzureDatabricksDeltaLakeImportCommand: coreHttp.CompositeMapper = { + serializedName: "AzureDatabricksDeltaLakeImportCommand", type: { name: "Composite", - className: "SalesforceServiceCloudSink", - uberParent: "CopySink", + className: "AzureDatabricksDeltaLakeImportCommand", + uberParent: "ImportSettings", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + polymorphicDiscriminator: ImportSettings.type.polymorphicDiscriminator, modelProperties: { - ...CopySink.type.modelProperties, - writeBehavior: { - serializedName: "writeBehavior", - type: { - name: "String" - } - }, - externalIdFieldName: { - serializedName: "externalIdFieldName", + ...ImportSettings.type.modelProperties, + dateFormat: { + serializedName: "dateFormat", type: { name: "any" } }, - ignoreNullValues: { - serializedName: "ignoreNullValues", + timestampFormat: { + serializedName: "timestampFormat", type: { name: "any" } @@ -17386,20 +19822,28 @@ export const SalesforceServiceCloudSink: coreHttp.CompositeMapper = { } }; -export const CosmosDbMongoDbApiSink: coreHttp.CompositeMapper = { - serializedName: "CosmosDbMongoDbApiSink", +export const SnowflakeImportCopyCommand: coreHttp.CompositeMapper = { + serializedName: "SnowflakeImportCopyCommand", type: { name: "Composite", - className: "CosmosDbMongoDbApiSink", - uberParent: "CopySink", + className: "SnowflakeImportCopyCommand", + uberParent: "ImportSettings", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + polymorphicDiscriminator: ImportSettings.type.polymorphicDiscriminator, modelProperties: { - ...CopySink.type.modelProperties, - writeBehavior: { - serializedName: "writeBehavior", + ...ImportSettings.type.modelProperties, + additionalCopyOptions: { + serializedName: 
"additionalCopyOptions", type: { - name: "any" + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + additionalFormatOptions: { + serializedName: "additionalFormatOptions", + type: { + name: "Dictionary", + value: { type: { name: "any" } } } } } @@ -17445,6 +19889,19 @@ export const TabularTranslator: coreHttp.CompositeMapper = { type: { name: "any" } + }, + typeConversion: { + serializedName: "typeConversion", + type: { + name: "any" + } + }, + typeConversionSettings: { + serializedName: "typeConversionSettings", + type: { + name: "Composite", + className: "TypeConversionSettings" + } } } } @@ -17484,7 +19941,9 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap ...DependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp("((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "-((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, MinLength: 8 }, @@ -17496,7 +19955,9 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap }, size: { constraints: { - Pattern: new RegExp("((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, MinLength: 8 }, @@ -17515,7 +19976,8 @@ export const LinkedIntegrationRuntimeKeyAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeKeyAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: + LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, key: { @@ -17535,7 +19997,8 @@ export const LinkedIntegrationRuntimeRbacAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeRbacAuthorization", uberParent: 
"LinkedIntegrationRuntimeType", - polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: + LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, resourceId: { @@ -18109,7 +20572,7 @@ export const WaitActivity: coreHttp.CompositeMapper = { serializedName: "typeProperties.waitTimeInSeconds", required: true, type: { - name: "Number" + name: "any" } } } @@ -18432,6 +20895,20 @@ export const CopyActivity: coreHttp.CompositeMapper = { className: "RedirectIncompatibleRowSettings" } }, + logStorageSettings: { + serializedName: "typeProperties.logStorageSettings", + type: { + name: "Composite", + className: "LogStorageSettings" + } + }, + logSettings: { + serializedName: "typeProperties.logSettings", + type: { + name: "Composite", + className: "LogSettings" + } + }, preserveRules: { serializedName: "typeProperties.preserveRules", type: { @@ -18453,6 +20930,19 @@ export const CopyActivity: coreHttp.CompositeMapper = { } } } + }, + validateDataConsistency: { + serializedName: "typeProperties.validateDataConsistency", + type: { + name: "any" + } + }, + skipErrorFile: { + serializedName: "typeProperties.skipErrorFile", + type: { + name: "Composite", + className: "SkipErrorFile" + } } } } @@ -19027,6 +21517,12 @@ export const CustomActivity: coreHttp.CompositeMapper = { type: { name: "any" } + }, + autoUserSpecification: { + serializedName: "typeProperties.autoUserSpecification", + type: { + name: "any" + } } } } @@ -19106,6 +21602,13 @@ export const DeleteActivity: coreHttp.CompositeMapper = { name: "Composite", className: "DatasetReference" } + }, + storeSettings: { + serializedName: "typeProperties.storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } } } } @@ -19277,6 +21780,20 @@ export const GetMetadataActivity: coreHttp.CompositeMapper = { } } } + }, + storeSettings: { + serializedName: 
"typeProperties.storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + }, + formatSettings: { + serializedName: "typeProperties.formatSettings", + type: { + name: "Composite", + className: "FormatReadSettings" + } } } } @@ -19638,8 +22155,8 @@ export const ExecuteDataFlowActivity: coreHttp.CompositeMapper = { polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, modelProperties: { ...ExecutionActivity.type.modelProperties, - dataFlow: { - serializedName: "typeProperties.dataFlow", + dataflow: { + serializedName: "typeProperties.dataflow", type: { name: "Composite", className: "DataFlowReference" @@ -19665,6 +22182,24 @@ export const ExecuteDataFlowActivity: coreHttp.CompositeMapper = { name: "Composite", className: "ExecuteDataFlowActivityTypePropertiesCompute" } + }, + traceLevel: { + serializedName: "typeProperties.traceLevel", + type: { + name: "any" + } + }, + continueOnError: { + serializedName: "typeProperties.continueOnError", + type: { + name: "any" + } + }, + runConcurrently: { + serializedName: "typeProperties.runConcurrently", + type: { + name: "any" + } } } } @@ -19826,6 +22361,51 @@ export const BlobEventsTrigger: coreHttp.CompositeMapper = { } }; +export const CustomEventsTrigger: coreHttp.CompositeMapper = { + serializedName: "CustomEventsTrigger", + type: { + name: "Composite", + className: "CustomEventsTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...MultiplePipelineTrigger.type.modelProperties, + subjectBeginsWith: { + serializedName: "typeProperties.subjectBeginsWith", + type: { + name: "String" + } + }, + subjectEndsWith: { + serializedName: "typeProperties.subjectEndsWith", + type: { + name: "String" + } + }, + events: { + serializedName: "typeProperties.events", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + scope: 
{ + serializedName: "typeProperties.scope", + required: true, + type: { + name: "String" + } + } + } + } +}; + export const AzureTableSource: coreHttp.CompositeMapper = { serializedName: "AzureTableSource", type: { @@ -20033,6 +22613,12 @@ export const SapCloudForCustomerSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + httpRequestTimeout: { + serializedName: "httpRequestTimeout", + type: { + name: "any" + } } } } @@ -20053,6 +22639,12 @@ export const SapEccSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + httpRequestTimeout: { + serializedName: "httpRequestTimeout", + type: { + name: "any" + } } } } @@ -20118,6 +22710,18 @@ export const SapOpenHubSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + customRfcReadTableFunctionModule: { + serializedName: "customRfcReadTableFunctionModule", + type: { + name: "any" + } + }, + sapDataColumnDelimiter: { + serializedName: "sapDataColumnDelimiter", + type: { + name: "any" + } } } } @@ -20169,6 +22773,12 @@ export const SapTableSource: coreHttp.CompositeMapper = { name: "any" } }, + sapDataColumnDelimiter: { + serializedName: "sapDataColumnDelimiter", + type: { + name: "any" + } + }, partitionOption: { serializedName: "partitionOption", type: { @@ -20216,6 +22826,25 @@ export const SqlSource: coreHttp.CompositeMapper = { type: { name: "Composite", className: "StoredProcedureParameter" } } } + }, + isolationLevel: { + serializedName: "isolationLevel", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "any" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "SqlPartitionSettings" + } } } } @@ -20257,6 +22886,19 @@ export const SqlServerSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "any" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + 
name: "Composite", + className: "SqlPartitionSettings" + } } } } @@ -20298,6 +22940,19 @@ export const AzureSqlSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "any" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "SqlPartitionSettings" + } } } } @@ -20339,6 +22994,19 @@ export const SqlMISource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "any" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "SqlPartitionSettings" + } } } } @@ -20371,6 +23039,19 @@ export const SqlDWSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "any" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "SqlPartitionSettings" + } } } } @@ -21103,6 +23784,12 @@ export const DynamicsAXSource: coreHttp.CompositeMapper = { type: { name: "any" } + }, + httpRequestTimeout: { + serializedName: "httpRequestTimeout", + type: { + name: "any" + } } } } @@ -21186,7 +23873,9 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper ...TriggerDependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp("((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "-?((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, MinLength: 8 }, @@ -21197,7 +23886,9 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper }, size: { constraints: { - Pattern: new RegExp("((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, 
MinLength: 8 }, @@ -21472,9 +24163,12 @@ export let discriminators = { StoreReadSettings: StoreReadSettings, StoreWriteSettings: StoreWriteSettings, FormatReadSettings: FormatReadSettings, + CompressionReadSettings: CompressionReadSettings, FormatWriteSettings: FormatWriteSettings, CopySource: CopySource, CopySink: CopySink, + ExportSettings: ExportSettings, + ImportSettings: ImportSettings, CopyTranslator: CopyTranslator, DependencyReference: DependencyReference, "CustomSetupBase.undefined": CustomSetupBase, @@ -21513,6 +24207,7 @@ export let discriminators = { "LinkedService.Web": WebLinkedService, "LinkedService.Cassandra": CassandraLinkedService, "LinkedService.MongoDb": MongoDbLinkedService, + "LinkedService.MongoDbAtlas": MongoDbAtlasLinkedService, "LinkedService.MongoDbV2": MongoDbV2LinkedService, "LinkedService.CosmosDbMongoDbApi": CosmosDbMongoDbApiLinkedService, "LinkedService.AzureDataLakeStore": AzureDataLakeStoreLinkedService, @@ -21566,6 +24261,7 @@ export let discriminators = { "LinkedService.HDInsightOnDemand": HDInsightOnDemandLinkedService, "LinkedService.AzureDataLakeAnalytics": AzureDataLakeAnalyticsLinkedService, "LinkedService.AzureDatabricks": AzureDatabricksLinkedService, + "LinkedService.AzureDatabricksDeltaLake": AzureDatabricksDeltaLakeLinkedService, "LinkedService.Responsys": ResponsysLinkedService, "LinkedService.DynamicsAX": DynamicsAXLinkedService, "LinkedService.OracleServiceCloud": OracleServiceCloudLinkedService, @@ -21573,12 +24269,18 @@ export let discriminators = { "LinkedService.SapTable": SapTableLinkedService, "LinkedService.AzureDataExplorer": AzureDataExplorerLinkedService, "LinkedService.AzureFunction": AzureFunctionLinkedService, + "LinkedService.Snowflake": SnowflakeLinkedService, + "LinkedService.SharePointOnlineList": SharePointOnlineListLinkedService, + "Dataset.AmazonS3Object": AmazonS3Dataset, "Dataset.Avro": AvroDataset, + "Dataset.Excel": ExcelDataset, "Dataset.Parquet": ParquetDataset, "Dataset.DelimitedText": 
DelimitedTextDataset, "Dataset.Json": JsonDataset, + "Dataset.Xml": XmlDataset, "Dataset.Orc": OrcDataset, "Dataset.Binary": BinaryDataset, + "Dataset.AzureBlob": AzureBlobDataset, "Dataset.AzureTable": AzureTableDataset, "Dataset.AzureSqlTable": AzureSqlTableDataset, "Dataset.AzureSqlMITable": AzureSqlMITableDataset, @@ -21590,8 +24292,12 @@ export let discriminators = { "Dataset.DynamicsEntity": DynamicsEntityDataset, "Dataset.DynamicsCrmEntity": DynamicsCrmEntityDataset, "Dataset.CommonDataServiceForAppsEntity": CommonDataServiceForAppsEntityDataset, + "Dataset.AzureDataLakeStoreFile": AzureDataLakeStoreDataset, + "Dataset.AzureBlobFSFile": AzureBlobFSDataset, "Dataset.Office365Table": Office365Dataset, + "Dataset.FileShare": FileShareDataset, "Dataset.MongoDbCollection": MongoDbCollectionDataset, + "Dataset.MongoDbAtlasCollection": MongoDbAtlasCollectionDataset, "Dataset.MongoDbV2Collection": MongoDbV2CollectionDataset, "Dataset.CosmosDbMongoDbApiCollection": CosmosDbMongoDbApiCollectionDataset, "Dataset.ODataResource": ODataResourceDataset, @@ -21619,6 +24325,7 @@ export let discriminators = { "Dataset.SapTableResource": SapTableResourceDataset, "Dataset.WebTable": WebTableDataset, "Dataset.AzureSearchIndex": AzureSearchIndexDataset, + "Dataset.HttpFile": HttpDataset, "Dataset.AmazonMWSObject": AmazonMWSObjectDataset, "Dataset.AzurePostgreSqlTable": AzurePostgreSqlTableDataset, "Dataset.ConcurObject": ConcurObjectDataset, @@ -21654,6 +24361,9 @@ export let discriminators = { "Dataset.OracleServiceCloudObject": OracleServiceCloudObjectDataset, "Dataset.AzureDataExplorerTable": AzureDataExplorerTableDataset, "Dataset.GoogleAdWordsObject": GoogleAdWordsObjectDataset, + "Dataset.SnowflakeTable": SnowflakeDataset, + "Dataset.SharePointOnlineListResource": SharePointOnlineListResourceDataset, + "Dataset.AzureDatabricksDeltaLakeDataset": AzureDatabricksDeltaLakeDataset, "Activity.Container": ControlActivity, "Activity.Execution": ExecutionActivity, 
"Activity.SqlPoolStoredProcedure": SqlPoolStoredProcedureActivity, @@ -21686,6 +24396,8 @@ export let discriminators = { "DatasetCompression.GZip": DatasetGZipCompression, "DatasetCompression.Deflate": DatasetDeflateCompression, "DatasetCompression.ZipDeflate": DatasetZipDeflateCompression, + "DatasetCompression.Tar": DatasetTarCompression, + "DatasetCompression.TarGZip": DatasetTarGZipCompression, "WebLinkedServiceTypeProperties.Anonymous": WebAnonymousAuthentication, "WebLinkedServiceTypeProperties.Basic": WebBasicAuthentication, "WebLinkedServiceTypeProperties.ClientCertificate": WebClientCertificateAuthentication, @@ -21705,14 +24417,25 @@ export let discriminators = { "StoreWriteSettings.AzureBlobFSWriteSettings": AzureBlobFSWriteSettings, "StoreWriteSettings.AzureDataLakeStoreWriteSettings": AzureDataLakeStoreWriteSettings, "StoreWriteSettings.FileServerWriteSettings": FileServerWriteSettings, + "StoreWriteSettings.AzureFileStorageWriteSettings": AzureFileStorageWriteSettings, "FormatReadSettings.DelimitedTextReadSettings": DelimitedTextReadSettings, + "FormatReadSettings.JsonReadSettings": JsonReadSettings, + "FormatReadSettings.XmlReadSettings": XmlReadSettings, + "FormatReadSettings.BinaryReadSettings": BinaryReadSettings, + "CompressionReadSettings.ZipDeflateReadSettings": ZipDeflateReadSettings, + "CompressionReadSettings.TarReadSettings": TarReadSettings, + "CompressionReadSettings.TarGZipReadSettings": TarGZipReadSettings, "FormatWriteSettings.AvroWriteSettings": AvroWriteSettings, + "FormatWriteSettings.OrcWriteSettings": OrcWriteSettings, + "FormatWriteSettings.ParquetWriteSettings": ParquetWriteSettings, "FormatWriteSettings.DelimitedTextWriteSettings": DelimitedTextWriteSettings, "FormatWriteSettings.JsonWriteSettings": JsonWriteSettings, "CopySource.AvroSource": AvroSource, + "CopySource.ExcelSource": ExcelSource, "CopySource.ParquetSource": ParquetSource, "CopySource.DelimitedTextSource": DelimitedTextSource, "CopySource.JsonSource": JsonSource, 
+ "CopySource.XmlSource": XmlSource, "CopySource.OrcSource": OrcSource, "CopySource.BinarySource": BinarySource, "CopySource.TabularSource": TabularSource, @@ -21733,17 +24456,23 @@ export let discriminators = { "CopySource.OracleSource": OracleSource, "CopySource.WebSource": WebSource, "CopySource.MongoDbSource": MongoDbSource, + "CopySource.MongoDbAtlasSource": MongoDbAtlasSource, "CopySource.MongoDbV2Source": MongoDbV2Source, "CopySource.CosmosDbMongoDbApiSource": CosmosDbMongoDbApiSource, "CopySource.Office365Source": Office365Source, "CopySource.AzureDataLakeStoreSource": AzureDataLakeStoreSource, "CopySource.AzureBlobFSSource": AzureBlobFSSource, "CopySource.HttpSource": HttpSource, + "CopySource.SnowflakeSource": SnowflakeSource, + "CopySource.AzureDatabricksDeltaLakeSource": AzureDatabricksDeltaLakeSource, + "CopySource.SharePointOnlineListSource": SharePointOnlineListSource, "CopySink.DelimitedTextSink": DelimitedTextSink, "CopySink.JsonSink": JsonSink, "CopySink.OrcSink": OrcSink, + "CopySink.RestSink": RestSink, "CopySink.AzurePostgreSqlSink": AzurePostgreSqlSink, "CopySink.AzureMySqlSink": AzureMySqlSink, + "CopySink.AzureDatabricksDeltaLakeSink": AzureDatabricksDeltaLakeSink, "CopySink.SapCloudForCustomerSink": SapCloudForCustomerSink, "CopySink.AzureQueueSink": AzureQueueSink, "CopySink.AzureTableSink": AzureTableSink, @@ -21759,6 +24488,7 @@ export let discriminators = { "CopySink.AzureSqlSink": AzureSqlSink, "CopySink.SqlMISink": SqlMISink, "CopySink.SqlDWSink": SqlDWSink, + "CopySink.SnowflakeSink": SnowflakeSink, "CopySink.OracleSink": OracleSink, "CopySink.AzureDataLakeStoreSink": AzureDataLakeStoreSink, "CopySink.AzureBlobFSSink": AzureBlobFSSink, @@ -21773,6 +24503,10 @@ export let discriminators = { "CopySink.SalesforceSink": SalesforceSink, "CopySink.SalesforceServiceCloudSink": SalesforceServiceCloudSink, "CopySink.CosmosDbMongoDbApiSink": CosmosDbMongoDbApiSink, + "ExportSettings.SnowflakeExportCopyCommand": SnowflakeExportCopyCommand, + 
"ExportSettings.AzureDatabricksDeltaLakeExportCommand": AzureDatabricksDeltaLakeExportCommand, + "ImportSettings.AzureDatabricksDeltaLakeImportCommand": AzureDatabricksDeltaLakeImportCommand, + "ImportSettings.SnowflakeImportCopyCommand": SnowflakeImportCopyCommand, "CopyTranslator.TabularTranslator": TabularTranslator, "DependencyReference.TriggerDependencyReference": TriggerDependencyReference, "DependencyReference.SelfDependencyTumblingWindowTriggerReference": SelfDependencyTumblingWindowTriggerReference, @@ -21817,6 +24551,7 @@ export let discriminators = { "Trigger.ScheduleTrigger": ScheduleTrigger, "Trigger.BlobTrigger": BlobTrigger, "Trigger.BlobEventsTrigger": BlobEventsTrigger, + "Trigger.CustomEventsTrigger": CustomEventsTrigger, "CopySource.AzureTableSource": AzureTableSource, "CopySource.InformixSource": InformixSource, "CopySource.Db2Source": Db2Source, diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts index 62b8bf720da6..3ede037709f5 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -8,6 +8,7 @@ import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; +import { BigDataPools } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -15,7 +16,7 @@ import { ArtifactsClientContext } from "../artifactsClientContext"; import { BigDataPoolsListResponse, BigDataPoolsGetResponse } from "../models"; /** Class representing a BigDataPools. */ -export class BigDataPools { +export class BigDataPoolsImpl implements BigDataPools { private readonly client: ArtifactsClientContext; /** @@ -30,14 +31,23 @@ export class BigDataPools { * List Big Data Pools * @param options The options parameters. 
*/ - async list(options?: coreHttp.OperationOptions): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-list", options); - + async list( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-list", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { - const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); + const result = await this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ); return result as BigDataPoolsListResponse; } catch (error) { span.setStatus({ @@ -59,14 +69,21 @@ export class BigDataPools { bigDataPoolName: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-get", options); - + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { bigDataPoolName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { - const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); + const result = await this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ); return result as BigDataPoolsGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts index 68c01e0baf0c..d7c22b2ab841 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -10,6 +10,7 @@ import { SpanStatusCode } from "@azure/core-tracing"; import { 
createSpan } from "../tracing"; import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { DataFlow } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -28,7 +29,7 @@ import { /// /** Class representing a DataFlow. */ -export class DataFlow { +export class DataFlowImpl implements DataFlow { private readonly client: ArtifactsClientContext; /** @@ -67,7 +68,10 @@ export class DataFlow { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDataFlowsByWorkspaceNext(continuationToken, options); + result = await this._getDataFlowsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -92,7 +96,10 @@ export class DataFlow { dataFlow: DataFlowResource, options?: DataFlowCreateOrUpdateDataFlowOptionalParams ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-createOrUpdateDataFlow", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateDataFlow", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { dataFlowName, dataFlow, @@ -137,10 +144,15 @@ export class DataFlow { dataFlowName: string, options?: DataFlowGetDataFlowOptionalParams ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-getDataFlow", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getDataFlow", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { dataFlowName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -168,7 +180,10 @@ export class DataFlow { dataFlowName: string, 
options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-deleteDataFlow", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteDataFlow", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { dataFlowName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -214,7 +229,10 @@ export class DataFlow { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-renameDataFlow", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameDataFlow", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { dataFlowName, request, @@ -259,10 +277,12 @@ export class DataFlow { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getDataFlowsByWorkspace", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -293,11 +313,13 @@ export class DataFlow { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getDataFlowsByWorkspaceNext", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -354,7 +376,11 @@ const createOrUpdateDataFlowOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataFlow, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.dataFlowName], - headerParameters: [Parameters.accept, 
Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index c41ba7f42c2e..40d6b4c0abf5 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -10,6 +10,7 @@ import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { DataFlowDebugSession } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -30,7 +31,7 @@ import { /// /** Class representing a DataFlowDebugSession. 
*/ -export class DataFlowDebugSession { +export class DataFlowDebugSessionImpl implements DataFlowDebugSession { private readonly client: ArtifactsClientContext; /** @@ -69,7 +70,10 @@ export class DataFlowDebugSession { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._queryDataFlowDebugSessionsByWorkspaceNext(continuationToken, options); + result = await this._queryDataFlowDebugSessionsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -78,7 +82,9 @@ export class DataFlowDebugSession { private async *queryDataFlowDebugSessionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage(options)) { + for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage( + options + )) { yield* page; } } @@ -91,10 +97,12 @@ export class DataFlowDebugSession { async createDataFlowDebugSession( request: CreateDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions - ): Promise> { + ): Promise< + LROPoller + > { const { span, updatedOptions } = createSpan( "ArtifactsClient-createDataFlowDebugSession", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { request, @@ -136,13 +144,17 @@ export class DataFlowDebugSession { */ private async _queryDataFlowDebugSessionsByWorkspace( options?: coreHttp.OperationOptions - ): Promise { + ): Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse + > { const { span, updatedOptions } = createSpan( "ArtifactsClient-_queryDataFlowDebugSessionsByWorkspace", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = 
await this.client.sendOperationRequest( @@ -170,10 +182,15 @@ export class DataFlowDebugSession { request: DataFlowDebugPackage, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-addDataFlow", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-addDataFlow", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { request, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -203,11 +220,13 @@ export class DataFlowDebugSession { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-deleteDataFlowDebugSession", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { request, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -235,7 +254,10 @@ export class DataFlowDebugSession { request: DataFlowDebugCommandRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-executeCommand", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-executeCommand", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { request, options: this.getOperationOptions(updatedOptions, "undefined") @@ -279,14 +301,18 @@ export class DataFlowDebugSession { private async _queryDataFlowDebugSessionsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions - ): Promise { + ): Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse + > { const { span, updatedOptions } = createSpan( "ArtifactsClient-_queryDataFlowDebugSessionsByWorkspaceNext", - options + 
options || {} ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts index 54d57fab9e44..17160897b375 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -10,6 +10,7 @@ import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { Dataset } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -28,7 +29,7 @@ import { /// /** Class representing a Dataset. 
*/ -export class Dataset { +export class DatasetImpl implements Dataset { private readonly client: ArtifactsClientContext; /** @@ -67,7 +68,10 @@ export class Dataset { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDatasetsByWorkspaceNext(continuationToken, options); + result = await this._getDatasetsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -88,9 +92,14 @@ export class Dataset { private async _getDatasetsByWorkspace( options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-_getDatasetsByWorkspace", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getDatasetsByWorkspace", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -120,7 +129,10 @@ export class Dataset { dataset: DatasetResource, options?: DatasetCreateOrUpdateDatasetOptionalParams ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-createOrUpdateDataset", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateDataset", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { datasetName, dataset, @@ -165,10 +177,15 @@ export class Dataset { datasetName: string, options?: DatasetGetDatasetOptionalParams ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-getDataset", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getDataset", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { datasetName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + 
options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -196,7 +213,10 @@ export class Dataset { datasetName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-deleteDataset", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteDataset", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { datasetName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -242,7 +262,10 @@ export class Dataset { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-renameDataset", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameDataset", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { datasetName, request, @@ -289,11 +312,13 @@ export class Dataset { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getDatasetsByWorkspaceNext", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -366,7 +391,11 @@ const createOrUpdateDatasetOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataset, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.datasetName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts 
b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts index b0738164c48e..e8b0d2f9cbc8 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -8,14 +8,18 @@ import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; +import { IntegrationRuntimes } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClientContext } from "../artifactsClientContext"; -import { IntegrationRuntimesListResponse, IntegrationRuntimesGetResponse } from "../models"; +import { + IntegrationRuntimesListResponse, + IntegrationRuntimesGetResponse +} from "../models"; /** Class representing a IntegrationRuntimes. */ -export class IntegrationRuntimes { +export class IntegrationRuntimesImpl implements IntegrationRuntimes { private readonly client: ArtifactsClientContext; /** @@ -30,13 +34,23 @@ export class IntegrationRuntimes { * List Integration Runtimes * @param options The options parameters. 
*/ - async list(options?: coreHttp.OperationOptions): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-list", options); + async list( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-list", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { - const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); + const result = await this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ); return result as IntegrationRuntimesListResponse; } catch (error) { span.setStatus({ @@ -58,13 +72,21 @@ export class IntegrationRuntimes { integrationRuntimeName: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-get", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { integrationRuntimeName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { - const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); + const result = await this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ); return result as IntegrationRuntimesGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/library.ts b/sdk/synapse/synapse-artifacts/src/operations/library.ts index 07a072cd79bf..e37f6b1c8125 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/library.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/library.ts @@ -10,6 +10,7 @@ import { SpanStatusCode } from 
"@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { Library } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -26,7 +27,7 @@ import { /// /** Class representing a Library. */ -export class Library { +export class LibraryImpl implements Library { private readonly client: ArtifactsClientContext; /** @@ -41,7 +42,9 @@ export class Library { * Lists Library. * @param options The options parameters. */ - public list(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator { + public list( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { const iter = this.listPagingAll(options); return { next() { @@ -81,13 +84,23 @@ export class Library { * Lists Library. * @param options The options parameters. */ - private async _list(options?: coreHttp.OperationOptions): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-_list", options); + private async _list( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_list", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { - const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); + const result = await this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ); return result as LibraryListOperationResponse; } catch (error) { span.setStatus({ @@ -110,7 +123,10 @@ export class Library { libraryName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-flush", 
options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-flush", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { libraryName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -133,7 +149,10 @@ export class Library { } }; - const initialOperationResult = await sendOperation(operationArguments, flushOperationSpec); + const initialOperationResult = await sendOperation( + operationArguments, + flushOperationSpec + ); return new LROPoller({ initialOperationArguments: operationArguments, initialOperationSpec: flushOperationSpec, @@ -151,10 +170,15 @@ export class Library { operationId: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-getOperationResult", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getOperationResult", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { operationId, - options: this.getOperationOptions(updatedOptions, "undefined") + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -183,7 +207,10 @@ export class Library { libraryName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-delete", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-delete", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { libraryName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -206,7 +233,10 @@ export class Library { } }; - const initialOperationResult = await sendOperation(operationArguments, deleteOperationSpec); + const initialOperationResult = await sendOperation( + operationArguments, + deleteOperationSpec + ); return new LROPoller({ initialOperationArguments: operationArguments, initialOperationSpec: deleteOperationSpec, @@ -221,14 
+251,25 @@ export class Library { * extension length. * @param options The options parameters. */ - async get(libraryName: string, options?: coreHttp.OperationOptions): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-get", options); + async get( + libraryName: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { libraryName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { - const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); + const result = await this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ); return result as LibraryGetResponse; } catch (error) { span.setStatus({ @@ -251,7 +292,10 @@ export class Library { libraryName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-create", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-create", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { libraryName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -274,7 +318,10 @@ export class Library { } }; - const initialOperationResult = await sendOperation(operationArguments, createOperationSpec); + const initialOperationResult = await sendOperation( + operationArguments, + createOperationSpec + ); return new LROPoller({ initialOperationArguments: operationArguments, initialOperationSpec: createOperationSpec, @@ -296,11 +343,16 @@ export class Library { content: coreHttp.HttpRequestBody, options?: LibraryAppendOptionalParams ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-append", options); + const { span, updatedOptions } = createSpan( + 
"ArtifactsClient-append", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { libraryName, content, - options: this.getOperationOptions(updatedOptions, "undefined") + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -328,10 +380,15 @@ export class Library { nextLink: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-_listNext", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_listNext", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts index 2b8bf86fe6ba..5c398f239806 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -10,6 +10,7 @@ import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { LinkedService } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -28,7 +29,7 @@ import { /// /** Class representing a LinkedService. 
*/ -export class LinkedService { +export class LinkedServiceImpl implements LinkedService { private readonly client: ArtifactsClientContext; /** @@ -67,7 +68,10 @@ export class LinkedService { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getLinkedServicesByWorkspaceNext(continuationToken, options); + result = await this._getLinkedServicesByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -76,7 +80,9 @@ export class LinkedService { private async *getLinkedServicesByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getLinkedServicesByWorkspacePagingPage(options)) { + for await (const page of this.getLinkedServicesByWorkspacePagingPage( + options + )) { yield* page; } } @@ -90,10 +96,12 @@ export class LinkedService { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getLinkedServicesByWorkspace", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -125,7 +133,7 @@ export class LinkedService { ): Promise> { const { span, updatedOptions } = createSpan( "ArtifactsClient-createOrUpdateLinkedService", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { linkedServiceName, @@ -171,10 +179,15 @@ export class LinkedService { linkedServiceName: string, options?: LinkedServiceGetLinkedServiceOptionalParams ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-getLinkedService", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getLinkedService", + options || {} + ); const operationArguments: 
coreHttp.OperationArguments = { linkedServiceName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -202,7 +215,10 @@ export class LinkedService { linkedServiceName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-deleteLinkedService", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteLinkedService", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { linkedServiceName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -248,7 +264,10 @@ export class LinkedService { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-renameLinkedService", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameLinkedService", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { linkedServiceName, request, @@ -296,11 +315,13 @@ export class LinkedService { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getLinkedServicesByWorkspaceNext", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -373,7 +394,11 @@ const createOrUpdateLinkedServiceOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.linkedService, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ 
+ Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts index 1782aca47170..6e41623aab20 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -10,6 +10,7 @@ import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { Notebook } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -30,7 +31,7 @@ import { /// /** Class representing a Notebook. */ -export class Notebook { +export class NotebookImpl implements Notebook { private readonly client: ArtifactsClientContext; /** @@ -69,7 +70,10 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebooksByWorkspaceNext(continuationToken, options); + result = await this._getNotebooksByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -111,7 +115,10 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebookSummaryByWorkSpaceNext(continuationToken, options); + result = await this._getNotebookSummaryByWorkSpaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -120,7 +127,9 @@ export class Notebook { private async *getNotebookSummaryByWorkSpacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of 
this.getNotebookSummaryByWorkSpacePagingPage(options)) { + for await (const page of this.getNotebookSummaryByWorkSpacePagingPage( + options + )) { yield* page; } } @@ -134,10 +143,12 @@ export class Notebook { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getNotebooksByWorkspace", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -165,10 +176,12 @@ export class Notebook { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getNotebookSummaryByWorkSpace", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -198,7 +211,10 @@ export class Notebook { notebook: NotebookResource, options?: NotebookCreateOrUpdateNotebookOptionalParams ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-createOrUpdateNotebook", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateNotebook", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { notebookName, notebook, @@ -243,10 +259,15 @@ export class Notebook { notebookName: string, options?: NotebookGetNotebookOptionalParams ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-getNotebook", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getNotebook", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { notebookName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: 
coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -274,7 +295,10 @@ export class Notebook { notebookName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-deleteNotebook", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteNotebook", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { notebookName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -320,7 +344,10 @@ export class Notebook { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-renameNotebook", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameNotebook", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { notebookName, request, @@ -368,11 +395,13 @@ export class Notebook { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getNotebooksByWorkspaceNext", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -403,11 +432,13 @@ export class Notebook { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getNotebookSummaryByWorkSpaceNext", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -496,7 +527,11 @@ const createOrUpdateNotebookOperationSpec: 
coreHttp.OperationSpec = { requestBody: Parameters.notebook, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.notebookName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts index 73c88a88dccd..026b76663b27 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -10,6 +10,7 @@ import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { Pipeline } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -30,7 +31,7 @@ import { /// /** Class representing a Pipeline. 
*/ -export class Pipeline { +export class PipelineImpl implements Pipeline { private readonly client: ArtifactsClientContext; /** @@ -69,7 +70,10 @@ export class Pipeline { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getPipelinesByWorkspaceNext(continuationToken, options); + result = await this._getPipelinesByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -92,10 +96,12 @@ export class Pipeline { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getPipelinesByWorkspace", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -125,7 +131,10 @@ export class Pipeline { pipeline: PipelineResource, options?: PipelineCreateOrUpdatePipelineOptionalParams ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-createOrUpdatePipeline", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdatePipeline", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, pipeline, @@ -170,10 +179,15 @@ export class Pipeline { pipelineName: string, options?: PipelineGetPipelineOptionalParams ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-getPipeline", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getPipeline", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ 
-201,7 +215,10 @@ export class Pipeline { pipelineName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-deletePipeline", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deletePipeline", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -247,7 +264,10 @@ export class Pipeline { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-renamePipeline", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renamePipeline", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, request, @@ -292,10 +312,15 @@ export class Pipeline { pipelineName: string, options?: PipelineCreatePipelineRunOptionalParams ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-createPipelineRun", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createPipelineRun", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -326,11 +351,13 @@ export class Pipeline { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getPipelinesByWorkspaceNext", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -403,7 +430,11 @@ const 
createOrUpdatePipelineOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.pipeline, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.pipelineName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts index 2deaa6ff93a6..7f8e0a69ec64 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -6,9 +6,9 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -// Licensed under the MIT license. import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; +import { PipelineRun } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -22,7 +22,7 @@ import { } from "../models"; /** Class representing a PipelineRun. 
*/ -export class PipelineRun { +export class PipelineRunImpl implements PipelineRun { private readonly client: ArtifactsClientContext; /** @@ -44,11 +44,13 @@ export class PipelineRun { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-queryPipelineRunsByWorkspace", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { filterParameters, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -76,10 +78,15 @@ export class PipelineRun { runId: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-getPipelineRun", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getPipelineRun", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { runId, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -111,12 +118,17 @@ export class PipelineRun { filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-queryActivityRuns", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-queryActivityRuns", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, runId, filterParameters, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -144,10 +156,15 @@ export class PipelineRun { runId: string, options?: PipelineRunCancelPipelineRunOptionalParams ): Promise { - 
const { span, updatedOptions } = createSpan("ArtifactsClient-cancelPipelineRun", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-cancelPipelineRun", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { runId, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -216,7 +233,11 @@ const queryActivityRunsOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.filterParameters, queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.pipelineName, Parameters.runId], + urlParameters: [ + Parameters.endpoint, + Parameters.pipelineName, + Parameters.runId + ], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts index b7e858b3584d..47afdbb83ede 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -10,6 +10,7 @@ import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { SparkJobDefinition } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -30,7 +31,7 @@ import { /// /** Class representing a SparkJobDefinition. 
*/ -export class SparkJobDefinition { +export class SparkJobDefinitionImpl implements SparkJobDefinition { private readonly client: ArtifactsClientContext; /** @@ -69,7 +70,10 @@ export class SparkJobDefinition { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSparkJobDefinitionsByWorkspaceNext(continuationToken, options); + result = await this._getSparkJobDefinitionsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -78,7 +82,9 @@ export class SparkJobDefinition { private async *getSparkJobDefinitionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage(options)) { + for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage( + options + )) { yield* page; } } @@ -92,10 +98,12 @@ export class SparkJobDefinition { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getSparkJobDefinitionsByWorkspace", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -124,10 +132,12 @@ export class SparkJobDefinition { sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams - ): Promise> { + ): Promise< + LROPoller + > { const { span, updatedOptions } = createSpan( "ArtifactsClient-createOrUpdateSparkJobDefinition", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionName, @@ -173,10 +183,15 @@ export class SparkJobDefinition { sparkJobDefinitionName: string, options?: 
SparkJobDefinitionGetSparkJobDefinitionOptionalParams ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-getSparkJobDefinition", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getSparkJobDefinition", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -206,7 +221,7 @@ export class SparkJobDefinition { ): Promise> { const { span, updatedOptions } = createSpan( "ArtifactsClient-deleteSparkJobDefinition", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionName, @@ -253,7 +268,7 @@ export class SparkJobDefinition { ): Promise> { const { span, updatedOptions } = createSpan( "ArtifactsClient-executeSparkJobDefinition", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionName, @@ -303,7 +318,7 @@ export class SparkJobDefinition { ): Promise> { const { span, updatedOptions } = createSpan( "ArtifactsClient-renameSparkJobDefinition", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionName, @@ -349,7 +364,10 @@ export class SparkJobDefinition { sparkJobDefinitionAzureResource: SparkJobDefinitionResource, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-debugSparkJobDefinition", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-debugSparkJobDefinition", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionAzureResource, options: this.getOperationOptions(updatedOptions, "location") @@ -397,11 +415,13 @@ export class SparkJobDefinition { ): Promise { const { span, 
updatedOptions } = createSpan( "ArtifactsClient-_getSparkJobDefinitionsByWorkspaceNext", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -474,7 +494,11 @@ const createOrUpdateSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sparkJobDefinition, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts index 340d898cdc10..f5a738de072a 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -8,6 +8,7 @@ import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; +import { SqlPools } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -15,7 +16,7 @@ import { ArtifactsClientContext } from "../artifactsClientContext"; import { SqlPoolsListResponse, SqlPoolsGetResponse } from "../models"; /** Class representing a SqlPools. */ -export class SqlPools { +export class SqlPoolsImpl implements SqlPools { private readonly client: ArtifactsClientContext; /** @@ -30,13 +31,23 @@ export class SqlPools { * List Sql Pools * @param options The options parameters. 
*/ - async list(options?: coreHttp.OperationOptions): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-list", options); + async list( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-list", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { - const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); + const result = await this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ); return result as SqlPoolsListResponse; } catch (error) { span.setStatus({ @@ -58,13 +69,21 @@ export class SqlPools { sqlPoolName: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-get", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { sqlPoolName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { - const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); + const result = await this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ); return result as SqlPoolsGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts index e1c91f2c1cf6..bb4c1d07b6bc 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -5,11 +5,12 @@ * Code generated by Microsoft (R) AutoRest Code Generator. 
* Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -// Licensed under the MIT license. + import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { SqlScript } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -28,7 +29,7 @@ import { /// /** Class representing a SqlScript. */ -export class SqlScript { +export class SqlScriptImpl implements SqlScript { private readonly client: ArtifactsClientContext; /** @@ -67,7 +68,10 @@ export class SqlScript { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSqlScriptsByWorkspaceNext(continuationToken, options); + result = await this._getSqlScriptsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -90,10 +94,12 @@ export class SqlScript { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getSqlScriptsByWorkspace", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -123,7 +129,10 @@ export class SqlScript { sqlScript: SqlScriptResource, options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-createOrUpdateSqlScript", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateSqlScript", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { sqlScriptName, sqlScript, @@ -168,10 
+177,15 @@ export class SqlScript { sqlScriptName: string, options?: SqlScriptGetSqlScriptOptionalParams ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-getSqlScript", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getSqlScript", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { sqlScriptName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -199,7 +213,10 @@ export class SqlScript { sqlScriptName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-deleteSqlScript", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteSqlScript", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { sqlScriptName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -245,7 +262,10 @@ export class SqlScript { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-renameSqlScript", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameSqlScript", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { sqlScriptName, request, @@ -293,11 +313,13 @@ export class SqlScript { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getSqlScriptsByWorkspaceNext", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -370,7 +392,11 @@ const createOrUpdateSqlScriptOperationSpec: 
coreHttp.OperationSpec = { requestBody: Parameters.sqlScript, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts index 5a41389b2ebb..f7e5619c4fc9 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -5,10 +5,12 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ + import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { Trigger } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -29,7 +31,7 @@ import { /// /** Class representing a Trigger. 
*/ -export class Trigger { +export class TriggerImpl implements Trigger { private readonly client: ArtifactsClientContext; /** @@ -68,7 +70,10 @@ export class Trigger { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getTriggersByWorkspaceNext(continuationToken, options); + result = await this._getTriggersByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -89,9 +94,14 @@ export class Trigger { private async _getTriggersByWorkspace( options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-_getTriggersByWorkspace", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getTriggersByWorkspace", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -121,7 +131,10 @@ export class Trigger { trigger: TriggerResource, options?: TriggerCreateOrUpdateTriggerOptionalParams ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-createOrUpdateTrigger", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateTrigger", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { triggerName, trigger, @@ -166,10 +179,15 @@ export class Trigger { triggerName: string, options?: TriggerGetTriggerOptionalParams ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-getTrigger", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getTrigger", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { triggerName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + 
options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -197,7 +215,10 @@ export class Trigger { triggerName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-deleteTrigger", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteTrigger", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { triggerName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -243,7 +264,7 @@ export class Trigger { ): Promise> { const { span, updatedOptions } = createSpan( "ArtifactsClient-subscribeTriggerToEvents", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { triggerName, @@ -290,11 +311,13 @@ export class Trigger { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-getEventSubscriptionStatus", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { triggerName, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -324,7 +347,7 @@ export class Trigger { ): Promise> { const { span, updatedOptions } = createSpan( "ArtifactsClient-unsubscribeTriggerFromEvents", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { triggerName, @@ -369,7 +392,10 @@ export class Trigger { triggerName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-startTrigger", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-startTrigger", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { triggerName, options: this.getOperationOptions(updatedOptions, "undefined") @@ 
-413,7 +439,10 @@ export class Trigger { triggerName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan("ArtifactsClient-stopTrigger", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-stopTrigger", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { triggerName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -459,11 +488,13 @@ export class Trigger { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_getTriggersByWorkspaceNext", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -536,7 +567,11 @@ const createOrUpdateTriggerOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.trigger, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.triggerName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts index 975e7481bb3a..c2c5b712b000 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -5,16 +5,21 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ + import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; +import { TriggerRun } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClientContext } from "../artifactsClientContext"; -import { RunFilterParameters, TriggerRunQueryTriggerRunsByWorkspaceResponse } from "../models"; +import { + RunFilterParameters, + TriggerRunQueryTriggerRunsByWorkspaceResponse +} from "../models"; /** Class representing a TriggerRun. */ -export class TriggerRun { +export class TriggerRunImpl implements TriggerRun { private readonly client: ArtifactsClientContext; /** @@ -36,11 +41,16 @@ export class TriggerRun { runId: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-rerunTriggerInstance", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-rerunTriggerInstance", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { triggerName, runId, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -70,11 +80,16 @@ export class TriggerRun { runId: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-cancelTriggerInstance", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-cancelTriggerInstance", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { triggerName, runId, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -104,11 +119,13 @@ 
export class TriggerRun { ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-queryTriggerRunsByWorkspace", - options + options || {} ); const operationArguments: coreHttp.OperationArguments = { filterParameters, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -140,7 +157,11 @@ const rerunTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], + urlParameters: [ + Parameters.endpoint, + Parameters.runId, + Parameters.triggerName + ], headerParameters: [Parameters.accept], serializer }; @@ -154,7 +175,11 @@ const cancelTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], + urlParameters: [ + Parameters.endpoint, + Parameters.runId, + Parameters.triggerName + ], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts index 1aab36d3bd29..e659ffb31a03 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -5,8 +5,10 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ + import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; +import { Workspace } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -14,7 +16,7 @@ import { ArtifactsClientContext } from "../artifactsClientContext"; import { WorkspaceGetResponse } from "../models"; /** Class representing a Workspace. */ -export class Workspace { +export class WorkspaceImpl implements Workspace { private readonly client: ArtifactsClientContext; /** @@ -29,13 +31,23 @@ export class Workspace { * Get Workspace * @param options The options parameters. */ - async get(options?: coreHttp.OperationOptions): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-get", options); + async get( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { - const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); + const result = await this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ); return result as WorkspaceGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts index 33d45f941658..2e217d6470c3 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -5,8 +5,10 @@ * Code generated by Microsoft (R) AutoRest Code Generator. 
* Changes may cause incorrect behavior and will be lost if the code is regenerated. */ + import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; +import { WorkspaceGitRepoManagement } from "../operationsInterfaces"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -18,7 +20,8 @@ import { } from "../models"; /** Class representing a WorkspaceGitRepoManagement. */ -export class WorkspaceGitRepoManagement { +export class WorkspaceGitRepoManagementImpl + implements WorkspaceGitRepoManagement { private readonly client: ArtifactsClientContext; /** @@ -38,10 +41,15 @@ export class WorkspaceGitRepoManagement { gitHubAccessTokenRequest: GitHubAccessTokenRequest, options?: WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams ): Promise { - const { span, updatedOptions } = createSpan("ArtifactsClient-getGitHubAccessToken", options); + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getGitHubAccessToken", + options || {} + ); const operationArguments: coreHttp.OperationArguments = { gitHubAccessTokenRequest, - options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions) + options: coreHttp.operationOptionsToRequestOptionsBase( + updatedOptions || {} + ) }; try { const result = await this.client.sendOperationRequest( @@ -74,7 +82,11 @@ const getGitHubAccessTokenOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.gitHubAccessTokenRequest, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.clientRequestId], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.clientRequestId + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/bigDataPools.ts 
new file mode 100644 index 000000000000..6cd8be3e6946 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/bigDataPools.ts @@ -0,0 +1,28 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import * as coreHttp from "@azure/core-http"; +import { BigDataPoolsListResponse, BigDataPoolsGetResponse } from "../models"; + +/** Interface representing a BigDataPools. */ +export interface BigDataPools { + /** + * List Big Data Pools + * @param options The options parameters. + */ + list(options?: coreHttp.OperationOptions): Promise; + /** + * Get Big Data Pool + * @param bigDataPoolName The Big Data Pool name + * @param options The options parameters. + */ + get( + bigDataPoolName: string, + options?: coreHttp.OperationOptions + ): Promise; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlow.ts new file mode 100644 index 000000000000..9ad9e26b9cbe --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlow.ts @@ -0,0 +1,72 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import "@azure/core-paging"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import { LROPoller } from "../lro"; +import { + DataFlowResource, + DataFlowCreateOrUpdateDataFlowOptionalParams, + DataFlowCreateOrUpdateDataFlowResponse, + DataFlowGetDataFlowOptionalParams, + DataFlowGetDataFlowResponse, + ArtifactRenameRequest +} from "../models"; + +/// +/** Interface representing a DataFlow. 
*/ +export interface DataFlow { + /** + * Lists data flows. + * @param options The options parameters. + */ + listDataFlowsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator; + /** + * Creates or updates a data flow. + * @param dataFlowName The data flow name. + * @param dataFlow Data flow resource definition. + * @param options The options parameters. + */ + createOrUpdateDataFlow( + dataFlowName: string, + dataFlow: DataFlowResource, + options?: DataFlowCreateOrUpdateDataFlowOptionalParams + ): Promise>; + /** + * Gets a data flow. + * @param dataFlowName The data flow name. + * @param options The options parameters. + */ + getDataFlow( + dataFlowName: string, + options?: DataFlowGetDataFlowOptionalParams + ): Promise; + /** + * Deletes a data flow. + * @param dataFlowName The data flow name. + * @param options The options parameters. + */ + deleteDataFlow( + dataFlowName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Renames a dataflow. + * @param dataFlowName The data flow name. + * @param request proposed new name. + * @param options The options parameters. + */ + renameDataFlow( + dataFlowName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise>; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowDebugSession.ts new file mode 100644 index 000000000000..ff975c209fcd --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowDebugSession.ts @@ -0,0 +1,70 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ + +import "@azure/core-paging"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import { LROPoller } from "../lro"; +import { + DataFlowDebugSessionInfo, + CreateDataFlowDebugSessionRequest, + DataFlowDebugSessionCreateDataFlowDebugSessionResponse, + DataFlowDebugPackage, + DataFlowDebugSessionAddDataFlowResponse, + DeleteDataFlowDebugSessionRequest, + DataFlowDebugCommandRequest, + DataFlowDebugSessionExecuteCommandResponse +} from "../models"; + +/// +/** Interface representing a DataFlowDebugSession. */ +export interface DataFlowDebugSession { + /** + * Query all active data flow debug sessions. + * @param options The options parameters. + */ + listQueryDataFlowDebugSessionsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator; + /** + * Creates a data flow debug session. + * @param request Data flow debug session definition + * @param options The options parameters. + */ + createDataFlowDebugSession( + request: CreateDataFlowDebugSessionRequest, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Add a data flow into debug session. + * @param request Data flow debug session definition with debug content. + * @param options The options parameters. + */ + addDataFlow( + request: DataFlowDebugPackage, + options?: coreHttp.OperationOptions + ): Promise; + /** + * Deletes a data flow debug session. + * @param request Data flow debug session definition for deletion + * @param options The options parameters. + */ + deleteDataFlowDebugSession( + request: DeleteDataFlowDebugSessionRequest, + options?: coreHttp.OperationOptions + ): Promise; + /** + * Execute a data flow debug command. + * @param request Data flow debug command definition. + * @param options The options parameters. 
+ */ + executeCommand( + request: DataFlowDebugCommandRequest, + options?: coreHttp.OperationOptions + ): Promise>; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataset.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataset.ts new file mode 100644 index 000000000000..6ddaa9a408fe --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataset.ts @@ -0,0 +1,72 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import "@azure/core-paging"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import { LROPoller } from "../lro"; +import { + DatasetResource, + DatasetCreateOrUpdateDatasetOptionalParams, + DatasetCreateOrUpdateDatasetResponse, + DatasetGetDatasetOptionalParams, + DatasetGetDatasetResponse, + ArtifactRenameRequest +} from "../models"; + +/// +/** Interface representing a Dataset. */ +export interface Dataset { + /** + * Lists datasets. + * @param options The options parameters. + */ + listDatasetsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator; + /** + * Creates or updates a dataset. + * @param datasetName The dataset name. + * @param dataset Dataset resource definition. + * @param options The options parameters. + */ + createOrUpdateDataset( + datasetName: string, + dataset: DatasetResource, + options?: DatasetCreateOrUpdateDatasetOptionalParams + ): Promise>; + /** + * Gets a dataset. + * @param datasetName The dataset name. + * @param options The options parameters. + */ + getDataset( + datasetName: string, + options?: DatasetGetDatasetOptionalParams + ): Promise; + /** + * Deletes a dataset. + * @param datasetName The dataset name. + * @param options The options parameters. 
+ */ + deleteDataset( + datasetName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Renames a dataset. + * @param datasetName The dataset name. + * @param request proposed new name. + * @param options The options parameters. + */ + renameDataset( + datasetName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise>; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/index.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/index.ts new file mode 100644 index 000000000000..bf87a2306dd8 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/index.ts @@ -0,0 +1,25 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +export * from "./linkedService"; +export * from "./dataset"; +export * from "./pipeline"; +export * from "./pipelineRun"; +export * from "./trigger"; +export * from "./triggerRun"; +export * from "./dataFlow"; +export * from "./dataFlowDebugSession"; +export * from "./sqlScript"; +export * from "./sparkJobDefinition"; +export * from "./notebook"; +export * from "./workspace"; +export * from "./sqlPools"; +export * from "./bigDataPools"; +export * from "./integrationRuntimes"; +export * from "./library"; +export * from "./workspaceGitRepoManagement"; diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/integrationRuntimes.ts new file mode 100644 index 000000000000..21abe2e6d7b0 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/integrationRuntimes.ts @@ -0,0 +1,33 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. 
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import * as coreHttp from "@azure/core-http"; +import { + IntegrationRuntimesListResponse, + IntegrationRuntimesGetResponse +} from "../models"; + +/** Interface representing a IntegrationRuntimes. */ +export interface IntegrationRuntimes { + /** + * List Integration Runtimes + * @param options The options parameters. + */ + list( + options?: coreHttp.OperationOptions + ): Promise; + /** + * Get Integration Runtime + * @param integrationRuntimeName The Integration Runtime name + * @param options The options parameters. + */ + get( + integrationRuntimeName: string, + options?: coreHttp.OperationOptions + ): Promise; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts new file mode 100644 index 000000000000..51e4ba8d8dd6 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts @@ -0,0 +1,93 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + + +import "@azure/core-paging"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import { LROPoller } from "../lro"; +import { + LibraryResource, + LibraryGetOperationResultResponse, + LibraryGetResponse, + LibraryAppendOptionalParams +} from "../models"; + +/// +/** Interface representing a Library. */ +export interface Library { + /** + * Lists Library. + * @param options The options parameters. + */ + list( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator; + /** + * Flush Library + * @param libraryName file name to upload. Minimum length of the filename should be 1 excluding the + * extension length. 
+ * @param options The options parameters. + */ + flush( + libraryName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Get Operation result for Library + * @param operationId operation id for which status is requested + * @param options The options parameters. + */ + getOperationResult( + operationId: string, + options?: coreHttp.OperationOptions + ): Promise; + /** + * Delete Library + * @param libraryName file name to upload. Minimum length of the filename should be 1 excluding the + * extension length. + * @param options The options parameters. + */ + delete( + libraryName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Get Library + * @param libraryName file name to upload. Minimum length of the filename should be 1 excluding the + * extension length. + * @param options The options parameters. + */ + get( + libraryName: string, + options?: coreHttp.OperationOptions + ): Promise; + /** + * Creates a library with the library name. + * @param libraryName file name to upload. Minimum length of the filename should be 1 excluding the + * extension length. + * @param options The options parameters. + */ + create( + libraryName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Append the content to the library resource created using the create operation. The maximum content + * size is 4MiB. Content larger than 4MiB must be appended in 4MiB chunks + * @param libraryName file name to upload. Minimum length of the filename should be 1 excluding the + * extension length. + * @param content Library file chunk. + * @param options The options parameters. 
+ */ + append( + libraryName: string, + content: coreHttp.HttpRequestBody, + options?: LibraryAppendOptionalParams + ): Promise; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/linkedService.ts new file mode 100644 index 000000000000..e407c4a3773e --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/linkedService.ts @@ -0,0 +1,72 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import "@azure/core-paging"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import { LROPoller } from "../lro"; +import { + LinkedServiceResource, + LinkedServiceCreateOrUpdateLinkedServiceOptionalParams, + LinkedServiceCreateOrUpdateLinkedServiceResponse, + LinkedServiceGetLinkedServiceOptionalParams, + LinkedServiceGetLinkedServiceResponse, + ArtifactRenameRequest +} from "../models"; + +/// +/** Interface representing a LinkedService. */ +export interface LinkedService { + /** + * Lists linked services. + * @param options The options parameters. + */ + listLinkedServicesByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator; + /** + * Creates or updates a linked service. + * @param linkedServiceName The linked service name. + * @param linkedService Linked service resource definition. + * @param options The options parameters. + */ + createOrUpdateLinkedService( + linkedServiceName: string, + linkedService: LinkedServiceResource, + options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams + ): Promise>; + /** + * Gets a linked service. + * @param linkedServiceName The linked service name. + * @param options The options parameters. 
+ */ + getLinkedService( + linkedServiceName: string, + options?: LinkedServiceGetLinkedServiceOptionalParams + ): Promise; + /** + * Deletes a linked service. + * @param linkedServiceName The linked service name. + * @param options The options parameters. + */ + deleteLinkedService( + linkedServiceName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Renames a linked service. + * @param linkedServiceName The linked service name. + * @param request proposed new name. + * @param options The options parameters. + */ + renameLinkedService( + linkedServiceName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise>; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebook.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebook.ts new file mode 100644 index 000000000000..580dcd8996d5 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebook.ts @@ -0,0 +1,79 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import "@azure/core-paging"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import { LROPoller } from "../lro"; +import { + NotebookResource, + NotebookCreateOrUpdateNotebookOptionalParams, + NotebookCreateOrUpdateNotebookResponse, + NotebookGetNotebookOptionalParams, + NotebookGetNotebookResponse, + ArtifactRenameRequest +} from "../models"; + +/// +/** Interface representing a Notebook. */ +export interface Notebook { + /** + * Lists Notebooks. + * @param options The options parameters. + */ + listNotebooksByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator; + /** + * Lists a summary of Notebooks. + * @param options The options parameters. 
+ */ + listNotebookSummaryByWorkSpace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator; + /** + * Creates or updates a Note Book. + * @param notebookName The notebook name. + * @param notebook Note book resource definition. + * @param options The options parameters. + */ + createOrUpdateNotebook( + notebookName: string, + notebook: NotebookResource, + options?: NotebookCreateOrUpdateNotebookOptionalParams + ): Promise>; + /** + * Gets a Note Book. + * @param notebookName The notebook name. + * @param options The options parameters. + */ + getNotebook( + notebookName: string, + options?: NotebookGetNotebookOptionalParams + ): Promise; + /** + * Deletes a Note book. + * @param notebookName The notebook name. + * @param options The options parameters. + */ + deleteNotebook( + notebookName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Renames a notebook. + * @param notebookName The notebook name. + * @param request proposed new name. + * @param options The options parameters. + */ + renameNotebook( + notebookName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise>; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipeline.ts new file mode 100644 index 000000000000..ee93d5be9287 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipeline.ts @@ -0,0 +1,83 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ + +import "@azure/core-paging"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import { LROPoller } from "../lro"; +import { + PipelineResource, + PipelineCreateOrUpdatePipelineOptionalParams, + PipelineCreateOrUpdatePipelineResponse, + PipelineGetPipelineOptionalParams, + PipelineGetPipelineResponse, + ArtifactRenameRequest, + PipelineCreatePipelineRunOptionalParams, + PipelineCreatePipelineRunResponse +} from "../models"; + +/// +/** Interface representing a Pipeline. */ +export interface Pipeline { + /** + * Lists pipelines. + * @param options The options parameters. + */ + listPipelinesByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator; + /** + * Creates or updates a pipeline. + * @param pipelineName The pipeline name. + * @param pipeline Pipeline resource definition. + * @param options The options parameters. + */ + createOrUpdatePipeline( + pipelineName: string, + pipeline: PipelineResource, + options?: PipelineCreateOrUpdatePipelineOptionalParams + ): Promise>; + /** + * Gets a pipeline. + * @param pipelineName The pipeline name. + * @param options The options parameters. + */ + getPipeline( + pipelineName: string, + options?: PipelineGetPipelineOptionalParams + ): Promise; + /** + * Deletes a pipeline. + * @param pipelineName The pipeline name. + * @param options The options parameters. + */ + deletePipeline( + pipelineName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Renames a pipeline. + * @param pipelineName The pipeline name. + * @param request proposed new name. + * @param options The options parameters. + */ + renamePipeline( + pipelineName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Creates a run of a pipeline. + * @param pipelineName The pipeline name. + * @param options The options parameters. 
+ */ + createPipelineRun( + pipelineName: string, + options?: PipelineCreatePipelineRunOptionalParams + ): Promise; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineRun.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineRun.ts new file mode 100644 index 000000000000..ca9ddbfd7ae8 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineRun.ts @@ -0,0 +1,60 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import * as coreHttp from "@azure/core-http"; +import { + RunFilterParameters, + PipelineRunQueryPipelineRunsByWorkspaceResponse, + PipelineRunGetPipelineRunResponse, + PipelineRunQueryActivityRunsResponse, + PipelineRunCancelPipelineRunOptionalParams +} from "../models"; + +/** Interface representing a PipelineRun. */ +export interface PipelineRun { + /** + * Query pipeline runs in the workspace based on input filter conditions. + * @param filterParameters Parameters to filter the pipeline run. + * @param options The options parameters. + */ + queryPipelineRunsByWorkspace( + filterParameters: RunFilterParameters, + options?: coreHttp.OperationOptions + ): Promise; + /** + * Get a pipeline run by its run ID. + * @param runId The pipeline run identifier. + * @param options The options parameters. + */ + getPipelineRun( + runId: string, + options?: coreHttp.OperationOptions + ): Promise; + /** + * Query activity runs based on input filter conditions. + * @param pipelineName The pipeline name. + * @param runId The pipeline run identifier. + * @param filterParameters Parameters to filter the activity runs. + * @param options The options parameters. 
+ */ + queryActivityRuns( + pipelineName: string, + runId: string, + filterParameters: RunFilterParameters, + options?: coreHttp.OperationOptions + ): Promise; + /** + * Cancel a pipeline run by its run ID. + * @param runId The pipeline run identifier. + * @param options The options parameters. + */ + cancelPipelineRun( + runId: string, + options?: PipelineRunCancelPipelineRunOptionalParams + ): Promise; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinition.ts new file mode 100644 index 000000000000..9787084497f5 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinition.ts @@ -0,0 +1,94 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import "@azure/core-paging"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import { LROPoller } from "../lro"; +import { + SparkJobDefinitionResource, + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams, + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse, + SparkJobDefinitionGetSparkJobDefinitionOptionalParams, + SparkJobDefinitionGetSparkJobDefinitionResponse, + SparkJobDefinitionExecuteSparkJobDefinitionResponse, + ArtifactRenameRequest, + SparkJobDefinitionDebugSparkJobDefinitionResponse +} from "../models"; + +/// +/** Interface representing a SparkJobDefinition. */ +export interface SparkJobDefinition { + /** + * Lists spark job definitions. + * @param options The options parameters. + */ + listSparkJobDefinitionsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator; + /** + * Creates or updates a Spark Job Definition. 
+ * @param sparkJobDefinitionName The spark job definition name. + * @param sparkJobDefinition Spark Job Definition resource definition. + * @param options The options parameters. + */ + createOrUpdateSparkJobDefinition( + sparkJobDefinitionName: string, + sparkJobDefinition: SparkJobDefinitionResource, + options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams + ): Promise< + LROPoller + >; + /** + * Gets a Spark Job Definition. + * @param sparkJobDefinitionName The spark job definition name. + * @param options The options parameters. + */ + getSparkJobDefinition( + sparkJobDefinitionName: string, + options?: SparkJobDefinitionGetSparkJobDefinitionOptionalParams + ): Promise; + /** + * Deletes a Spark Job Definition. + * @param sparkJobDefinitionName The spark job definition name. + * @param options The options parameters. + */ + deleteSparkJobDefinition( + sparkJobDefinitionName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Executes the spark job definition. + * @param sparkJobDefinitionName The spark job definition name. + * @param options The options parameters. + */ + executeSparkJobDefinition( + sparkJobDefinitionName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Renames a sparkJobDefinition. + * @param sparkJobDefinitionName The spark job definition name. + * @param request proposed new name. + * @param options The options parameters. + */ + renameSparkJobDefinition( + sparkJobDefinitionName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Debug the spark job definition. + * @param sparkJobDefinitionAzureResource Spark Job Definition resource definition. + * @param options The options parameters. 
+ */ + debugSparkJobDefinition( + sparkJobDefinitionAzureResource: SparkJobDefinitionResource, + options?: coreHttp.OperationOptions + ): Promise>; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlPools.ts new file mode 100644 index 000000000000..4f36e9842297 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlPools.ts @@ -0,0 +1,28 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import * as coreHttp from "@azure/core-http"; +import { SqlPoolsListResponse, SqlPoolsGetResponse } from "../models"; + +/** Interface representing a SqlPools. */ +export interface SqlPools { + /** + * List Sql Pools + * @param options The options parameters. + */ + list(options?: coreHttp.OperationOptions): Promise; + /** + * Get Sql Pool + * @param sqlPoolName The Sql Pool name + * @param options The options parameters. + */ + get( + sqlPoolName: string, + options?: coreHttp.OperationOptions + ): Promise; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlScript.ts new file mode 100644 index 000000000000..717a740c11c6 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlScript.ts @@ -0,0 +1,72 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ + +import "@azure/core-paging"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import { LROPoller } from "../lro"; +import { + SqlScriptResource, + SqlScriptCreateOrUpdateSqlScriptOptionalParams, + SqlScriptCreateOrUpdateSqlScriptResponse, + SqlScriptGetSqlScriptOptionalParams, + SqlScriptGetSqlScriptResponse, + ArtifactRenameRequest +} from "../models"; + +/// +/** Interface representing a SqlScript. */ +export interface SqlScript { + /** + * Lists sql scripts. + * @param options The options parameters. + */ + listSqlScriptsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator; + /** + * Creates or updates a Sql Script. + * @param sqlScriptName The sql script name. + * @param sqlScript Sql Script resource definition. + * @param options The options parameters. + */ + createOrUpdateSqlScript( + sqlScriptName: string, + sqlScript: SqlScriptResource, + options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams + ): Promise>; + /** + * Gets a sql script. + * @param sqlScriptName The sql script name. + * @param options The options parameters. + */ + getSqlScript( + sqlScriptName: string, + options?: SqlScriptGetSqlScriptOptionalParams + ): Promise; + /** + * Deletes a Sql Script. + * @param sqlScriptName The sql script name. + * @param options The options parameters. + */ + deleteSqlScript( + sqlScriptName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Renames a sqlScript. + * @param sqlScriptName The sql script name. + * @param request proposed new name. + * @param options The options parameters. 
+ */ + renameSqlScript( + sqlScriptName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise>; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/trigger.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/trigger.ts new file mode 100644 index 000000000000..6836ec623c28 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/trigger.ts @@ -0,0 +1,108 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import "@azure/core-paging"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import { LROPoller } from "../lro"; +import { + TriggerResource, + TriggerCreateOrUpdateTriggerOptionalParams, + TriggerCreateOrUpdateTriggerResponse, + TriggerGetTriggerOptionalParams, + TriggerGetTriggerResponse, + TriggerSubscribeTriggerToEventsResponse, + TriggerGetEventSubscriptionStatusResponse, + TriggerUnsubscribeTriggerFromEventsResponse +} from "../models"; + +/// +/** Interface representing a Trigger. */ +export interface Trigger { + /** + * Lists triggers. + * @param options The options parameters. + */ + listTriggersByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator; + /** + * Creates or updates a trigger. + * @param triggerName The trigger name. + * @param trigger Trigger resource definition. + * @param options The options parameters. + */ + createOrUpdateTrigger( + triggerName: string, + trigger: TriggerResource, + options?: TriggerCreateOrUpdateTriggerOptionalParams + ): Promise>; + /** + * Gets a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. 
+ */ + getTrigger( + triggerName: string, + options?: TriggerGetTriggerOptionalParams + ): Promise; + /** + * Deletes a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + deleteTrigger( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Subscribe event trigger to events. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + subscribeTriggerToEvents( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Get a trigger's event subscription status. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + getEventSubscriptionStatus( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise; + /** + * Unsubscribe event trigger from events. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + unsubscribeTriggerFromEvents( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Starts a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + startTrigger( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise>; + /** + * Stops a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + stopTrigger( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise>; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRun.ts new file mode 100644 index 000000000000..da0789e055bb --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRun.ts @@ -0,0 +1,48 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. 
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import * as coreHttp from "@azure/core-http"; +import { + RunFilterParameters, + TriggerRunQueryTriggerRunsByWorkspaceResponse +} from "../models"; + +/** Interface representing a TriggerRun. */ +export interface TriggerRun { + /** + * Rerun single trigger instance by runId. + * @param triggerName The trigger name. + * @param runId The pipeline run identifier. + * @param options The options parameters. + */ + rerunTriggerInstance( + triggerName: string, + runId: string, + options?: coreHttp.OperationOptions + ): Promise; + /** + * Cancel single trigger instance by runId. + * @param triggerName The trigger name. + * @param runId The pipeline run identifier. + * @param options The options parameters. + */ + cancelTriggerInstance( + triggerName: string, + runId: string, + options?: coreHttp.OperationOptions + ): Promise; + /** + * Query trigger runs. + * @param filterParameters Parameters to filter the pipeline run. + * @param options The options parameters. + */ + queryTriggerRunsByWorkspace( + filterParameters: RunFilterParameters, + options?: coreHttp.OperationOptions + ): Promise; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspace.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspace.ts new file mode 100644 index 000000000000..801fe591671f --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspace.ts @@ -0,0 +1,19 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import * as coreHttp from "@azure/core-http"; +import { WorkspaceGetResponse } from "../models"; + +/** Interface representing a Workspace. */ +export interface Workspace { + /** + * Get Workspace + * @param options The options parameters. 
+ */ + get(options?: coreHttp.OperationOptions): Promise; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspaceGitRepoManagement.ts new file mode 100644 index 000000000000..079644d22deb --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspaceGitRepoManagement.ts @@ -0,0 +1,26 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import { + GitHubAccessTokenRequest, + WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams, + WorkspaceGitRepoManagementGetGitHubAccessTokenResponse +} from "../models"; + +/** Interface representing a WorkspaceGitRepoManagement. */ +export interface WorkspaceGitRepoManagement { + /** + * Get the GitHub access token. + * @param gitHubAccessTokenRequest + * @param options The options parameters. + */ + getGitHubAccessToken( + gitHubAccessTokenRequest: GitHubAccessTokenRequest, + options?: WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams + ): Promise; +} diff --git a/sdk/synapse/synapse-artifacts/src/tracing.ts b/sdk/synapse/synapse-artifacts/src/tracing.ts index e23527cc70e8..da10eee3f3c2 100644 --- a/sdk/synapse/synapse-artifacts/src/tracing.ts +++ b/sdk/synapse/synapse-artifacts/src/tracing.ts @@ -6,18 +6,9 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -import { createSpanFunction, OperationTracingOptions } from "@azure/core-tracing"; -import { Span } from "@azure/core-tracing"; +import { createSpanFunction } from "@azure/core-tracing"; -export const createSpan: ( - operationName: string, - operationOptions: T | undefined -) => { - span: Span; - updatedOptions: T; -} = createSpanFunction({ +export const createSpan = createSpanFunction({ namespace: "Azure.Synapse.Artifacts", packagePrefix: "Microsoft.Synapse" }); diff --git a/sdk/synapse/synapse-artifacts/swagger/README.md b/sdk/synapse/synapse-artifacts/swagger/README.md index c24abc31a56f..e14b60de2ca5 100644 --- a/sdk/synapse/synapse-artifacts/swagger/README.md +++ b/sdk/synapse/synapse-artifacts/swagger/README.md @@ -6,7 +6,7 @@ ```yaml package-name: "@azure/synapse-artifacts" -require: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/master/specification/synapse/data-plane/readme.md +require: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/f953424dd168e71373bc52edb9713d2d86a14ada/specification/synapse/data-plane/readme.md use-extension: "@autorest/typescript": "latest" From 080b77c6850e23d156b09e868da97a0d780bc0f8 Mon Sep 17 00:00:00 2001 From: Jose Manuel Heredia Hidalgo Date: Thu, 1 Apr 2021 23:11:02 +0000 Subject: [PATCH 2/6] Format --- .../src/artifactsClientContext.ts | 14 +-- .../src/lro/azureAsyncOperationStrategy.ts | 42 ++------- .../src/lro/bodyPollingStrategy.ts | 5 +- .../src/lro/locationStrategy.ts | 7 +- .../synapse-artifacts/src/lro/lroPolicy.ts | 8 +- .../synapse-artifacts/src/lro/lroPoller.ts | 27 +----- .../synapse-artifacts/src/lro/models.ts | 8 +- .../synapse-artifacts/src/lro/operation.ts | 4 +- .../synapse-artifacts/src/lro/requestUtils.ts | 16 +--- .../synapse-artifacts/src/models/index.ts | 66 ++++--------- .../synapse-artifacts/src/models/mappers.ts | 55 ++++------- .../src/operations/bigDataPools.ts | 32 ++----- .../src/operations/dataFlow.ts | 38 ++------ .../src/operations/dataFlowDebugSession.ts | 47 
+++------- .../src/operations/dataset.ts | 38 ++------ .../src/operations/integrationRuntimes.ts | 37 ++------ .../src/operations/library.ts | 93 ++++--------------- .../src/operations/linkedService.ts | 32 ++----- .../src/operations/notebook.ts | 55 +++-------- .../src/operations/pipeline.ts | 47 ++-------- .../src/operations/pipelineRun.ts | 37 ++------ .../src/operations/sparkJobDefinition.ts | 31 ++----- .../src/operations/sqlPools.ts | 32 ++----- .../src/operations/sqlScript.ts | 38 ++------ .../src/operations/trigger.ts | 47 ++-------- .../src/operations/triggerRun.ts | 29 ++---- .../src/operations/workspace.ts | 18 +--- .../operations/workspaceGitRepoManagement.ts | 13 +-- .../integrationRuntimes.ts | 9 +- .../src/operationsInterfaces/library.ts | 10 +- .../sparkJobDefinition.ts | 4 +- .../src/operationsInterfaces/sqlPools.ts | 5 +- .../src/operationsInterfaces/triggerRun.ts | 5 +- 33 files changed, 214 insertions(+), 735 deletions(-) diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts index 580169496b45..0403da8ea465 100644 --- a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts +++ b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts @@ -56,17 +56,12 @@ export class ArtifactsClientContext extends coreHttp.ServiceClient { // When an array of factories is passed in, we'll just add the required factories, // in this case lroPolicy(). It is important to note that passing an array of factories // to a new client, bypasses core-http default factories. Just the pipelines provided will be run. 
- options.requestPolicyFactories = [ - lroPolicy(), - ...options.requestPolicyFactories - ]; + options.requestPolicyFactories = [lroPolicy(), ...options.requestPolicyFactories]; } else if (options.requestPolicyFactories) { // When we were passed a requestPolicyFactories as a function, we'll create a new one that adds the factories provided // in the options plus the required policies. When using this path, the pipelines passed to the client will be added to the // default policies added by core-http - const optionsPolicies = options.requestPolicyFactories([lroPolicy()]) || [ - lroPolicy() - ]; + const optionsPolicies = options.requestPolicyFactories([lroPolicy()]) || [lroPolicy()]; options.requestPolicyFactories = (defaultFactories) => [ ...optionsPolicies, ...defaultFactories @@ -74,10 +69,7 @@ export class ArtifactsClientContext extends coreHttp.ServiceClient { } else { // In case no request policy factories were provided, we'll just need to create a function that will add // the lroPolicy to the default pipelines added by core-http - options.requestPolicyFactories = (defaultFactories) => [ - lroPolicy(), - ...defaultFactories - ]; + options.requestPolicyFactories = (defaultFactories) => [lroPolicy(), ...defaultFactories]; } super(credentials, options); diff --git a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts index f54ccc954f04..542685ccb1bd 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts @@ -14,11 +14,7 @@ import { FinalStateVia, LROSYM } from "./models"; -import { - OperationSpec, - OperationArguments, - OperationResponse -} from "@azure/core-http"; +import { OperationSpec, OperationArguments, OperationResponse } from "@azure/core-http"; import { terminalStates } from "./constants"; import { SendOperationFn } from "."; @@ -29,14 +25,11 @@ export function 
createAzureAsyncOperationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error( - "Expected lroData to be defined for Azure-AsyncOperation strategy" - ); + throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); } let currentOperation = initialOperation; - let lastKnownPollingUrl = - lroData.azureAsyncOperation || lroData.operationLocation; + let lastKnownPollingUrl = lroData.azureAsyncOperation || lroData.operationLocation; return { isTerminal: () => { @@ -67,17 +60,12 @@ export function createAzureAsyncOperationStrategy( const initialOperationResult = initialOperation.result._response[LROSYM]; const currentOperationResult = currentOperation.result._response[LROSYM]; - if ( - !shouldPerformFinalGet(initialOperationResult, currentOperationResult) - ) { + if (!shouldPerformFinalGet(initialOperationResult, currentOperationResult)) { return currentOperation; } if (initialOperationResult?.requestMethod === "PUT") { - currentOperation = await sendFinalGet( - initialOperation, - sendOperationFn - ); + currentOperation = await sendFinalGet(initialOperation, sendOperationFn); return currentOperation; } @@ -85,29 +73,20 @@ export function createAzureAsyncOperationStrategy( if (initialOperationResult?.location) { switch (finalStateVia) { case "original-uri": - currentOperation = await sendFinalGet( - initialOperation, - sendOperationFn - ); + currentOperation = await sendFinalGet(initialOperation, sendOperationFn); return currentOperation; case "azure-async-operation": return currentOperation; case "location": default: - const location = - initialOperationResult.location || - currentOperationResult?.location; + const location = initialOperationResult.location || currentOperationResult?.location; if (!location) { throw new Error("Couldn't determine final GET URL from location"); } - return await sendFinalGet( - initialOperation, - sendOperationFn, - location - ); + return await 
sendFinalGet(initialOperation, sendOperationFn, location); } } @@ -185,10 +164,7 @@ function getCompositeMappers(responses: { }, {} as { [responseCode: string]: OperationResponse }); } -function shouldPerformFinalGet( - initialResult?: LROResponseInfo, - currentResult?: LROResponseInfo -) { +function shouldPerformFinalGet(initialResult?: LROResponseInfo, currentResult?: LROResponseInfo) { const { status } = currentResult || {}; const { requestMethod: initialRequestMethod, location } = initialResult || {}; if (status && status.toLowerCase() !== "succeeded") { diff --git a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts index c9404330ed76..dba5f3280cd0 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts @@ -52,10 +52,7 @@ export function createBodyPollingStrategy( }; // Execute the polling operation - initialOperation.result = await sendOperation( - initialOperation.args, - pollingSpec - ); + initialOperation.result = await sendOperation(initialOperation.args, pollingSpec); return initialOperation; } }; diff --git a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts index 6a763e37ecd1..1cfb103ecacc 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts @@ -16,9 +16,7 @@ export function createLocationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error( - "Expected lroData to be defined for Azure-AsyncOperation strategy" - ); + throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); } let currentOperation = initialOperation; @@ -59,8 +57,7 @@ export function createLocationStrategy( const result = await sendOperationFn(pollingArgs, pollingSpec); // Update latest 
polling url - lastKnownPollingUrl = - result._response[LROSYM]?.location || lastKnownPollingUrl; + lastKnownPollingUrl = result._response[LROSYM]?.location || lastKnownPollingUrl; // Update lastOperation result currentOperation = { diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts index 4d18c3b7f0ce..94c6d089d5dc 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -29,12 +29,8 @@ class LROPolicy extends BaseRequestPolicy { super(nextPolicy, options); } - public async sendRequest( - webResource: WebResource - ): Promise { - let result: LROOperationResponse = await this._nextPolicy.sendRequest( - webResource - ); + public async sendRequest(webResource: WebResource): Promise { + let result: LROOperationResponse = await this._nextPolicy.sendRequest(webResource); const _lroData = getLROData(result); result[LROSYM] = _lroData; diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts index 8c0406cf2468..3fff20ddaa9c 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts @@ -7,19 +7,8 @@ */ import { Poller } from "@azure/core-lro"; -import { - OperationSpec, - OperationArguments, - delay, - RestError -} from "@azure/core-http"; -import { - BaseResult, - LROOperationState, - LROOperationStep, - FinalStateVia, - LROSYM -} from "./models"; +import { OperationSpec, OperationArguments, delay, RestError } from "@azure/core-http"; +import { BaseResult, LROOperationState, LROOperationStep, FinalStateVia, LROSYM } from "./models"; import { makeOperation } from "./operation"; import { createBodyPollingStrategy } from "./bodyPollingStrategy"; import { createAzureAsyncOperationStrategy } from "./azureAsyncOperationStrategy"; @@ -78,11 +67,7 @@ export class LROPoller extends Poller< result: initialOperationResult }; - const 
pollingStrategy = getPollingStrategy( - initialOperation, - sendOperation, - finalStateVia - ); + const pollingStrategy = getPollingStrategy(initialOperation, sendOperation, finalStateVia); const state: LROOperationState = { // Initial operation will become the last operation @@ -135,11 +120,7 @@ function getPollingStrategy( } if (lroData.azureAsyncOperation || lroData.operationLocation) { - return createAzureAsyncOperationStrategy( - initialOperation, - sendOperationFn, - finalStateVia - ); + return createAzureAsyncOperationStrategy(initialOperation, sendOperationFn, finalStateVia); } if (lroData.location) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/models.ts b/sdk/synapse/synapse-artifacts/src/lro/models.ts index c4b96664f664..73502255a4dd 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/models.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/models.ts @@ -16,10 +16,7 @@ import { import { PollOperationState, PollOperation } from "@azure/core-lro"; export const LROSYM = Symbol("LROData"); -export type FinalStateVia = - | "azure-async-operation" - | "location" - | "original-uri"; +export type FinalStateVia = "azure-async-operation" | "location" | "original-uri"; export interface LROResponseInfo { requestMethod: HttpMethods; @@ -55,8 +52,7 @@ export interface LROOperationStep { result: TResult; } -export interface LROOperationState - extends PollOperationState { +export interface LROOperationState extends PollOperationState { lastOperation: LROOperationStep; initialOperation: LROOperationStep; pollingStrategy: LROStrategy; diff --git a/sdk/synapse/synapse-artifacts/src/lro/operation.ts b/sdk/synapse/synapse-artifacts/src/lro/operation.ts index 0860f07df0b7..dc299b243c55 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/operation.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/operation.ts @@ -49,9 +49,7 @@ async function update( const currentLroData = currentResponse.result._response[LROSYM]; if (!currentLroData) { - throw new Error( - "Expected lroData to 
be defined for updating LRO operation" - ); + throw new Error("Expected lroData to be defined for updating LRO operation"); } if (state.result) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts index e9af4cde5e25..3a6986f5a8fd 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts @@ -29,17 +29,10 @@ export function shouldDeserializeLRO(finalStateVia?: string) { isInitialRequest = false; } - if ( - initialOperationInfo.azureAsyncOperation || - initialOperationInfo.operationLocation - ) { + if (initialOperationInfo.azureAsyncOperation || initialOperationInfo.operationLocation) { return ( !isInitialRequest && - isAsyncOperationFinalResponse( - response, - initialOperationInfo, - finalStateVia - ) + isAsyncOperationFinalResponse(response, initialOperationInfo, finalStateVia) ); } @@ -77,10 +70,7 @@ function isAsyncOperationFinalResponse( return true; } - if ( - initialOperationInfo.requestMethod !== "PUT" && - !initialOperationInfo.location - ) { + if (initialOperationInfo.requestMethod !== "PUT" && !initialOperationInfo.location) { return true; } diff --git a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts index 16944c3f92d4..86a54bfe4dd5 100644 --- a/sdk/synapse/synapse-artifacts/src/models/index.ts +++ b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -220,10 +220,7 @@ export type IntegrationRuntimeUnion = | IntegrationRuntime | ManagedIntegrationRuntime | SelfHostedIntegrationRuntime; -export type SecretBaseUnion = - | SecretBase - | SecureString - | AzureKeyVaultSecretReference; +export type SecretBaseUnion = SecretBase | SecureString | AzureKeyVaultSecretReference; export type DatasetLocationUnion = | DatasetLocation | AzureBlobStorageLocation @@ -2452,12 +2449,7 @@ export interface DatasetSchemaDataElement { /** The format definition of a storage. 
*/ export interface DatasetStorageFormat { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "TextFormat" - | "JsonFormat" - | "AvroFormat" - | "OrcFormat" - | "ParquetFormat"; + type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | "ParquetFormat"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; /** Serializer. Type: string (or Expression with resultType string). */ @@ -2562,11 +2554,7 @@ export interface DistcpSettings { /** Format read settings. */ export interface FormatReadSettings { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "DelimitedTextReadSettings" - | "JsonReadSettings" - | "XmlReadSettings" - | "BinaryReadSettings"; + type: "DelimitedTextReadSettings" | "JsonReadSettings" | "XmlReadSettings" | "BinaryReadSettings"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; } @@ -11314,8 +11302,7 @@ export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceReso }; /** Optional parameters. */ -export interface LinkedServiceGetLinkedServiceOptionalParams - extends coreHttp.OperationOptions { +export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions { /** ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -11357,8 +11344,7 @@ export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { }; /** Optional parameters. */ -export interface DatasetCreateOrUpdateDatasetOptionalParams - extends coreHttp.OperationOptions { +export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { /** ETag of the dataset entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -11378,8 +11364,7 @@ export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { }; /** Optional parameters. */ -export interface DatasetGetDatasetOptionalParams - extends coreHttp.OperationOptions { +export interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions { /** ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -11421,8 +11406,7 @@ export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & { }; /** Optional parameters. */ -export interface PipelineCreateOrUpdatePipelineOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions { /** ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -11442,8 +11426,7 @@ export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & { }; /** Optional parameters. */ -export interface PipelineGetPipelineOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions { /** ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -11461,8 +11444,7 @@ export type PipelineGetPipelineResponse = PipelineResource & { }; /** Optional parameters. */ -export interface PipelineCreatePipelineRunOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions { /** Parameters of the pipeline run. 
These parameters will be used only if the runId is not specified. */ parameters?: { [propertyName: string]: any }; /** The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run. */ @@ -11534,8 +11516,7 @@ export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { }; /** Optional parameters. */ -export interface PipelineRunCancelPipelineRunOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions { /** If true, cancel all the Child pipelines that are triggered by the current pipeline. */ isRecursive?: boolean; } @@ -11553,8 +11534,7 @@ export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & { }; /** Optional parameters. */ -export interface TriggerCreateOrUpdateTriggerOptionalParams - extends coreHttp.OperationOptions { +export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions { /** ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -11574,8 +11554,7 @@ export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & { }; /** Optional parameters. */ -export interface TriggerGetTriggerOptionalParams - extends coreHttp.OperationOptions { +export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions { /** ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -11657,8 +11636,7 @@ export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResp }; /** Optional parameters. 
*/ -export interface DataFlowCreateOrUpdateDataFlowOptionalParams - extends coreHttp.OperationOptions { +export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions { /** ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -11678,8 +11656,7 @@ export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { }; /** Optional parameters. */ -export interface DataFlowGetDataFlowOptionalParams - extends coreHttp.OperationOptions { +export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions { /** ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -11797,8 +11774,7 @@ export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & }; /** Optional parameters. */ -export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams - extends coreHttp.OperationOptions { +export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions { /** ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -11818,8 +11794,7 @@ export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & { }; /** Optional parameters. */ -export interface SqlScriptGetSqlScriptOptionalParams - extends coreHttp.OperationOptions { +export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions { /** ETag of the sql compute entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
*/ ifNoneMatch?: string; } @@ -11965,8 +11940,7 @@ export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse }; /** Optional parameters. */ -export interface NotebookCreateOrUpdateNotebookOptionalParams - extends coreHttp.OperationOptions { +export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions { /** ETag of the Note book entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ ifMatch?: string; } @@ -11986,8 +11960,7 @@ export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { }; /** Optional parameters. */ -export interface NotebookGetNotebookOptionalParams - extends coreHttp.OperationOptions { +export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions { /** ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ ifNoneMatch?: string; } @@ -12186,8 +12159,7 @@ export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAcces }; /** Optional parameters. */ -export interface ArtifactsClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { /** Api Version */ apiVersion?: string; /** Overrides client endpoint. 
*/ diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts index 40d244b6eaf3..20955b669c05 100644 --- a/sdk/synapse/synapse-artifacts/src/models/mappers.ts +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -15786,8 +15786,7 @@ export const TextFormat: coreHttp.CompositeMapper = { className: "TextFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, columnDelimiter: { @@ -15855,8 +15854,7 @@ export const JsonFormat: coreHttp.CompositeMapper = { className: "JsonFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, filePattern: { @@ -15900,8 +15898,7 @@ export const AvroFormat: coreHttp.CompositeMapper = { className: "AvroFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -15915,8 +15912,7 @@ export const OrcFormat: coreHttp.CompositeMapper = { className: "OrcFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -15930,8 
+15926,7 @@ export const ParquetFormat: coreHttp.CompositeMapper = { className: "ParquetFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -16052,8 +16047,7 @@ export const WebAnonymousAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebAnonymousAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties } @@ -16066,8 +16060,7 @@ export const WebBasicAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebBasicAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, username: { @@ -16094,8 +16087,7 @@ export const WebClientCertificateAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebClientCertificateAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, pfx: { @@ -17124,8 +17116,7 @@ export const ZipDeflateReadSettings: coreHttp.CompositeMapper = { className: "ZipDeflateReadSettings", uberParent: "CompressionReadSettings", additionalProperties: 
{ type: { name: "Object" } }, - polymorphicDiscriminator: - CompressionReadSettings.type.polymorphicDiscriminator, + polymorphicDiscriminator: CompressionReadSettings.type.polymorphicDiscriminator, modelProperties: { ...CompressionReadSettings.type.modelProperties, preserveZipFileNameAsFolder: { @@ -17145,8 +17136,7 @@ export const TarReadSettings: coreHttp.CompositeMapper = { className: "TarReadSettings", uberParent: "CompressionReadSettings", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - CompressionReadSettings.type.polymorphicDiscriminator, + polymorphicDiscriminator: CompressionReadSettings.type.polymorphicDiscriminator, modelProperties: { ...CompressionReadSettings.type.modelProperties, preserveCompressionFileNameAsFolder: { @@ -17166,8 +17156,7 @@ export const TarGZipReadSettings: coreHttp.CompositeMapper = { className: "TarGZipReadSettings", uberParent: "CompressionReadSettings", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - CompressionReadSettings.type.polymorphicDiscriminator, + polymorphicDiscriminator: CompressionReadSettings.type.polymorphicDiscriminator, modelProperties: { ...CompressionReadSettings.type.modelProperties, preserveCompressionFileNameAsFolder: { @@ -19941,9 +19930,7 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap ...DependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp( - "-((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("-((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -19955,9 +19942,7 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap }, size: { constraints: { - Pattern: new RegExp( - "((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -19976,8 +19961,7 
@@ export const LinkedIntegrationRuntimeKeyAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeKeyAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: - LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, key: { @@ -19997,8 +19981,7 @@ export const LinkedIntegrationRuntimeRbacAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeRbacAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: - LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, resourceId: { @@ -23873,9 +23856,7 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper ...TriggerDependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp( - "-?((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("-?((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -23886,9 +23867,7 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper }, size: { constraints: { - Pattern: new RegExp( - "((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts index 3ede037709f5..4c8dca3317fd 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -31,23 +31,13 @@ 
export class BigDataPoolsImpl implements BigDataPools { * List Big Data Pools * @param options The options parameters. */ - async list( - options?: coreHttp.OperationOptions - ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-list", - options || {} - ); + async list(options?: coreHttp.OperationOptions): Promise { + const { span, updatedOptions } = createSpan("ArtifactsClient-list", options || {}); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); return result as BigDataPoolsListResponse; } catch (error) { span.setStatus({ @@ -69,21 +59,13 @@ export class BigDataPoolsImpl implements BigDataPools { bigDataPoolName: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-get", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-get", options || {}); const operationArguments: coreHttp.OperationArguments = { bigDataPoolName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); return result as BigDataPoolsGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts index d7c22b2ab841..94e74722c0cf 100644 --- 
a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -68,10 +68,7 @@ export class DataFlowImpl implements DataFlow { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDataFlowsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getDataFlowsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -144,15 +141,10 @@ export class DataFlowImpl implements DataFlow { dataFlowName: string, options?: DataFlowGetDataFlowOptionalParams ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-getDataFlow", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-getDataFlow", options || {}); const operationArguments: coreHttp.OperationArguments = { dataFlowName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -180,10 +172,7 @@ export class DataFlowImpl implements DataFlow { dataFlowName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-deleteDataFlow", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-deleteDataFlow", options || {}); const operationArguments: coreHttp.OperationArguments = { dataFlowName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -229,10 +218,7 @@ export class DataFlowImpl implements DataFlow { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-renameDataFlow", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-renameDataFlow", options || {}); const 
operationArguments: coreHttp.OperationArguments = { dataFlowName, request, @@ -280,9 +266,7 @@ export class DataFlowImpl implements DataFlow { options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -317,9 +301,7 @@ export class DataFlowImpl implements DataFlow { ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -376,11 +358,7 @@ const createOrUpdateDataFlowOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataFlow, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.dataFlowName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index 40d6b4c0abf5..d199d6093fb7 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -70,10 +70,7 @@ export class DataFlowDebugSessionImpl implements DataFlowDebugSession { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._queryDataFlowDebugSessionsByWorkspaceNext( - continuationToken, - options - ); + result = await this._queryDataFlowDebugSessionsByWorkspaceNext(continuationToken, options); continuationToken 
= result.nextLink; yield result.value || []; } @@ -82,9 +79,7 @@ export class DataFlowDebugSessionImpl implements DataFlowDebugSession { private async *queryDataFlowDebugSessionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage( - options - )) { + for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage(options)) { yield* page; } } @@ -97,9 +92,7 @@ export class DataFlowDebugSessionImpl implements DataFlowDebugSession { async createDataFlowDebugSession( request: CreateDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions - ): Promise< - LROPoller - > { + ): Promise> { const { span, updatedOptions } = createSpan( "ArtifactsClient-createDataFlowDebugSession", options || {} @@ -144,17 +137,13 @@ export class DataFlowDebugSessionImpl implements DataFlowDebugSession { */ private async _queryDataFlowDebugSessionsByWorkspace( options?: coreHttp.OperationOptions - ): Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse - > { + ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_queryDataFlowDebugSessionsByWorkspace", options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -182,15 +171,10 @@ export class DataFlowDebugSessionImpl implements DataFlowDebugSession { request: DataFlowDebugPackage, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-addDataFlow", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-addDataFlow", options || {}); const operationArguments: coreHttp.OperationArguments = { request, - options: 
coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -224,9 +208,7 @@ export class DataFlowDebugSessionImpl implements DataFlowDebugSession { ); const operationArguments: coreHttp.OperationArguments = { request, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -254,10 +236,7 @@ export class DataFlowDebugSessionImpl implements DataFlowDebugSession { request: DataFlowDebugCommandRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-executeCommand", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-executeCommand", options || {}); const operationArguments: coreHttp.OperationArguments = { request, options: this.getOperationOptions(updatedOptions, "undefined") @@ -301,18 +280,14 @@ export class DataFlowDebugSessionImpl implements DataFlowDebugSession { private async _queryDataFlowDebugSessionsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions - ): Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse - > { + ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_queryDataFlowDebugSessionsByWorkspaceNext", options || {} ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts index 
17160897b375..1efd41ff66fa 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -68,10 +68,7 @@ export class DatasetImpl implements Dataset { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDatasetsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getDatasetsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -97,9 +94,7 @@ export class DatasetImpl implements Dataset { options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -177,15 +172,10 @@ export class DatasetImpl implements Dataset { datasetName: string, options?: DatasetGetDatasetOptionalParams ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-getDataset", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-getDataset", options || {}); const operationArguments: coreHttp.OperationArguments = { datasetName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -213,10 +203,7 @@ export class DatasetImpl implements Dataset { datasetName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-deleteDataset", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-deleteDataset", options || {}); const operationArguments: coreHttp.OperationArguments = { datasetName, options: 
this.getOperationOptions(updatedOptions, "undefined") @@ -262,10 +249,7 @@ export class DatasetImpl implements Dataset { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-renameDataset", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-renameDataset", options || {}); const operationArguments: coreHttp.OperationArguments = { datasetName, request, @@ -316,9 +300,7 @@ export class DatasetImpl implements Dataset { ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -391,11 +373,7 @@ const createOrUpdateDatasetOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataset, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.datasetName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts index e8b0d2f9cbc8..fede3cdfe3f2 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -13,10 +13,7 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClientContext } from "../artifactsClientContext"; -import { - IntegrationRuntimesListResponse, - IntegrationRuntimesGetResponse -} from "../models"; +import { IntegrationRuntimesListResponse, 
IntegrationRuntimesGetResponse } from "../models"; /** Class representing a IntegrationRuntimes. */ export class IntegrationRuntimesImpl implements IntegrationRuntimes { @@ -34,23 +31,13 @@ export class IntegrationRuntimesImpl implements IntegrationRuntimes { * List Integration Runtimes * @param options The options parameters. */ - async list( - options?: coreHttp.OperationOptions - ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-list", - options || {} - ); + async list(options?: coreHttp.OperationOptions): Promise { + const { span, updatedOptions } = createSpan("ArtifactsClient-list", options || {}); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); return result as IntegrationRuntimesListResponse; } catch (error) { span.setStatus({ @@ -72,21 +59,13 @@ export class IntegrationRuntimesImpl implements IntegrationRuntimes { integrationRuntimeName: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-get", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-get", options || {}); const operationArguments: coreHttp.OperationArguments = { integrationRuntimeName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); return result as 
IntegrationRuntimesGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/library.ts b/sdk/synapse/synapse-artifacts/src/operations/library.ts index e37f6b1c8125..91f61baa05b2 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/library.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/library.ts @@ -42,9 +42,7 @@ export class LibraryImpl implements Library { * Lists Library. * @param options The options parameters. */ - public list( - options?: coreHttp.OperationOptions - ): PagedAsyncIterableIterator { + public list(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator { const iter = this.listPagingAll(options); return { next() { @@ -84,23 +82,13 @@ export class LibraryImpl implements Library { * Lists Library. * @param options The options parameters. */ - private async _list( - options?: coreHttp.OperationOptions - ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-_list", - options || {} - ); + private async _list(options?: coreHttp.OperationOptions): Promise { + const { span, updatedOptions } = createSpan("ArtifactsClient-_list", options || {}); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); return result as LibraryListOperationResponse; } catch (error) { span.setStatus({ @@ -123,10 +111,7 @@ export class LibraryImpl implements Library { libraryName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-flush", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-flush", options || {}); 
const operationArguments: coreHttp.OperationArguments = { libraryName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -149,10 +134,7 @@ export class LibraryImpl implements Library { } }; - const initialOperationResult = await sendOperation( - operationArguments, - flushOperationSpec - ); + const initialOperationResult = await sendOperation(operationArguments, flushOperationSpec); return new LROPoller({ initialOperationArguments: operationArguments, initialOperationSpec: flushOperationSpec, @@ -176,9 +158,7 @@ export class LibraryImpl implements Library { ); const operationArguments: coreHttp.OperationArguments = { operationId, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -207,10 +187,7 @@ export class LibraryImpl implements Library { libraryName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-delete", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-delete", options || {}); const operationArguments: coreHttp.OperationArguments = { libraryName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -233,10 +210,7 @@ export class LibraryImpl implements Library { } }; - const initialOperationResult = await sendOperation( - operationArguments, - deleteOperationSpec - ); + const initialOperationResult = await sendOperation(operationArguments, deleteOperationSpec); return new LROPoller({ initialOperationArguments: operationArguments, initialOperationSpec: deleteOperationSpec, @@ -251,25 +225,14 @@ export class LibraryImpl implements Library { * extension length. * @param options The options parameters. 
*/ - async get( - libraryName: string, - options?: coreHttp.OperationOptions - ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-get", - options || {} - ); + async get(libraryName: string, options?: coreHttp.OperationOptions): Promise { + const { span, updatedOptions } = createSpan("ArtifactsClient-get", options || {}); const operationArguments: coreHttp.OperationArguments = { libraryName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); return result as LibraryGetResponse; } catch (error) { span.setStatus({ @@ -292,10 +255,7 @@ export class LibraryImpl implements Library { libraryName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-create", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-create", options || {}); const operationArguments: coreHttp.OperationArguments = { libraryName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -318,10 +278,7 @@ export class LibraryImpl implements Library { } }; - const initialOperationResult = await sendOperation( - operationArguments, - createOperationSpec - ); + const initialOperationResult = await sendOperation(operationArguments, createOperationSpec); return new LROPoller({ initialOperationArguments: operationArguments, initialOperationSpec: createOperationSpec, @@ -343,16 +300,11 @@ export class LibraryImpl implements Library { content: coreHttp.HttpRequestBody, options?: LibraryAppendOptionalParams ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-append", - options || {} - ); + const { span, updatedOptions } = 
createSpan("ArtifactsClient-append", options || {}); const operationArguments: coreHttp.OperationArguments = { libraryName, content, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -380,15 +332,10 @@ export class LibraryImpl implements Library { nextLink: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-_listNext", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-_listNext", options || {}); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts index 5c398f239806..07804ed8c7bf 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -68,10 +68,7 @@ export class LinkedServiceImpl implements LinkedService { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getLinkedServicesByWorkspaceNext( - continuationToken, - options - ); + result = await this._getLinkedServicesByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -80,9 +77,7 @@ export class LinkedServiceImpl implements LinkedService { private async *getLinkedServicesByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getLinkedServicesByWorkspacePagingPage( - options - )) { + for 
await (const page of this.getLinkedServicesByWorkspacePagingPage(options)) { yield* page; } } @@ -99,9 +94,7 @@ export class LinkedServiceImpl implements LinkedService { options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -179,15 +172,10 @@ export class LinkedServiceImpl implements LinkedService { linkedServiceName: string, options?: LinkedServiceGetLinkedServiceOptionalParams ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-getLinkedService", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-getLinkedService", options || {}); const operationArguments: coreHttp.OperationArguments = { linkedServiceName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -319,9 +307,7 @@ export class LinkedServiceImpl implements LinkedService { ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -394,11 +380,7 @@ const createOrUpdateLinkedServiceOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.linkedService, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff 
--git a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts index 6e41623aab20..d39519bc9d03 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -70,10 +70,7 @@ export class NotebookImpl implements Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebooksByWorkspaceNext( - continuationToken, - options - ); + result = await this._getNotebooksByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -115,10 +112,7 @@ export class NotebookImpl implements Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebookSummaryByWorkSpaceNext( - continuationToken, - options - ); + result = await this._getNotebookSummaryByWorkSpaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -127,9 +121,7 @@ export class NotebookImpl implements Notebook { private async *getNotebookSummaryByWorkSpacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getNotebookSummaryByWorkSpacePagingPage( - options - )) { + for await (const page of this.getNotebookSummaryByWorkSpacePagingPage(options)) { yield* page; } } @@ -146,9 +138,7 @@ export class NotebookImpl implements Notebook { options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -179,9 +169,7 @@ export class NotebookImpl implements Notebook { options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: 
coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -259,15 +247,10 @@ export class NotebookImpl implements Notebook { notebookName: string, options?: NotebookGetNotebookOptionalParams ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-getNotebook", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-getNotebook", options || {}); const operationArguments: coreHttp.OperationArguments = { notebookName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -295,10 +278,7 @@ export class NotebookImpl implements Notebook { notebookName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-deleteNotebook", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-deleteNotebook", options || {}); const operationArguments: coreHttp.OperationArguments = { notebookName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -344,10 +324,7 @@ export class NotebookImpl implements Notebook { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-renameNotebook", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-renameNotebook", options || {}); const operationArguments: coreHttp.OperationArguments = { notebookName, request, @@ -399,9 +376,7 @@ export class NotebookImpl implements Notebook { ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + 
options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -436,9 +411,7 @@ export class NotebookImpl implements Notebook { ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -527,11 +500,7 @@ const createOrUpdateNotebookOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.notebook, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.notebookName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts index 026b76663b27..19b90d10778f 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -70,10 +70,7 @@ export class PipelineImpl implements Pipeline { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getPipelinesByWorkspaceNext( - continuationToken, - options - ); + result = await this._getPipelinesByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -99,9 +96,7 @@ export class PipelineImpl implements Pipeline { options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await 
this.client.sendOperationRequest( @@ -179,15 +174,10 @@ export class PipelineImpl implements Pipeline { pipelineName: string, options?: PipelineGetPipelineOptionalParams ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-getPipeline", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-getPipeline", options || {}); const operationArguments: coreHttp.OperationArguments = { pipelineName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -215,10 +205,7 @@ export class PipelineImpl implements Pipeline { pipelineName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-deletePipeline", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-deletePipeline", options || {}); const operationArguments: coreHttp.OperationArguments = { pipelineName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -264,10 +251,7 @@ export class PipelineImpl implements Pipeline { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-renamePipeline", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-renamePipeline", options || {}); const operationArguments: coreHttp.OperationArguments = { pipelineName, request, @@ -312,15 +296,10 @@ export class PipelineImpl implements Pipeline { pipelineName: string, options?: PipelineCreatePipelineRunOptionalParams ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-createPipelineRun", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-createPipelineRun", options || {}); const operationArguments: coreHttp.OperationArguments 
= { pipelineName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -355,9 +334,7 @@ export class PipelineImpl implements Pipeline { ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -430,11 +407,7 @@ const createOrUpdatePipelineOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.pipeline, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.pipelineName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts index 7f8e0a69ec64..e111100c970e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -48,9 +48,7 @@ export class PipelineRunImpl implements PipelineRun { ); const operationArguments: coreHttp.OperationArguments = { filterParameters, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -78,15 +76,10 @@ export class PipelineRunImpl implements PipelineRun { runId: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-getPipelineRun", - options || {} - ); + const { span, 
updatedOptions } = createSpan("ArtifactsClient-getPipelineRun", options || {}); const operationArguments: coreHttp.OperationArguments = { runId, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -118,17 +111,12 @@ export class PipelineRunImpl implements PipelineRun { filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-queryActivityRuns", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-queryActivityRuns", options || {}); const operationArguments: coreHttp.OperationArguments = { pipelineName, runId, filterParameters, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -156,15 +144,10 @@ export class PipelineRunImpl implements PipelineRun { runId: string, options?: PipelineRunCancelPipelineRunOptionalParams ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-cancelPipelineRun", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-cancelPipelineRun", options || {}); const operationArguments: coreHttp.OperationArguments = { runId, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -233,11 +216,7 @@ const queryActivityRunsOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.filterParameters, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.pipelineName, - Parameters.runId - ], + 
urlParameters: [Parameters.endpoint, Parameters.pipelineName, Parameters.runId], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts index 47afdbb83ede..a047f7dd68ea 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -70,10 +70,7 @@ export class SparkJobDefinitionImpl implements SparkJobDefinition { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSparkJobDefinitionsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getSparkJobDefinitionsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -82,9 +79,7 @@ export class SparkJobDefinitionImpl implements SparkJobDefinition { private async *getSparkJobDefinitionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage( - options - )) { + for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage(options)) { yield* page; } } @@ -101,9 +96,7 @@ export class SparkJobDefinitionImpl implements SparkJobDefinition { options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -132,9 +125,7 @@ export class SparkJobDefinitionImpl implements SparkJobDefinition { sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams - ): Promise< - 
LROPoller - > { + ): Promise> { const { span, updatedOptions } = createSpan( "ArtifactsClient-createOrUpdateSparkJobDefinition", options || {} @@ -189,9 +180,7 @@ export class SparkJobDefinitionImpl implements SparkJobDefinition { ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -419,9 +408,7 @@ export class SparkJobDefinitionImpl implements SparkJobDefinition { ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -494,11 +481,7 @@ const createOrUpdateSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sparkJobDefinition, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts index f5a738de072a..500273dc20e7 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -31,23 +31,13 @@ export class SqlPoolsImpl implements SqlPools { * List Sql Pools * @param options The options parameters. 
*/ - async list( - options?: coreHttp.OperationOptions - ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-list", - options || {} - ); + async list(options?: coreHttp.OperationOptions): Promise { + const { span, updatedOptions } = createSpan("ArtifactsClient-list", options || {}); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); return result as SqlPoolsListResponse; } catch (error) { span.setStatus({ @@ -69,21 +59,13 @@ export class SqlPoolsImpl implements SqlPools { sqlPoolName: string, options?: coreHttp.OperationOptions ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-get", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-get", options || {}); const operationArguments: coreHttp.OperationArguments = { sqlPoolName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); return result as SqlPoolsGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts index bb4c1d07b6bc..a02ffb8e8dc2 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -68,10 +68,7 @@ export class SqlScriptImpl 
implements SqlScript { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSqlScriptsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getSqlScriptsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -97,9 +94,7 @@ export class SqlScriptImpl implements SqlScript { options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -177,15 +172,10 @@ export class SqlScriptImpl implements SqlScript { sqlScriptName: string, options?: SqlScriptGetSqlScriptOptionalParams ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-getSqlScript", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-getSqlScript", options || {}); const operationArguments: coreHttp.OperationArguments = { sqlScriptName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -213,10 +203,7 @@ export class SqlScriptImpl implements SqlScript { sqlScriptName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-deleteSqlScript", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-deleteSqlScript", options || {}); const operationArguments: coreHttp.OperationArguments = { sqlScriptName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -262,10 +249,7 @@ export class SqlScriptImpl implements SqlScript { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): 
Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-renameSqlScript", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-renameSqlScript", options || {}); const operationArguments: coreHttp.OperationArguments = { sqlScriptName, request, @@ -317,9 +301,7 @@ export class SqlScriptImpl implements SqlScript { ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -392,11 +374,7 @@ const createOrUpdateSqlScriptOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sqlScript, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts index f7e5619c4fc9..144b7f1d9385 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -70,10 +70,7 @@ export class TriggerImpl implements Trigger { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getTriggersByWorkspaceNext( - continuationToken, - options - ); + result = await this._getTriggersByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -99,9 +96,7 @@ export class TriggerImpl implements Trigger { options || {} ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - 
updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -179,15 +174,10 @@ export class TriggerImpl implements Trigger { triggerName: string, options?: TriggerGetTriggerOptionalParams ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-getTrigger", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-getTrigger", options || {}); const operationArguments: coreHttp.OperationArguments = { triggerName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -215,10 +205,7 @@ export class TriggerImpl implements Trigger { triggerName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-deleteTrigger", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-deleteTrigger", options || {}); const operationArguments: coreHttp.OperationArguments = { triggerName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -315,9 +302,7 @@ export class TriggerImpl implements Trigger { ); const operationArguments: coreHttp.OperationArguments = { triggerName, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -392,10 +377,7 @@ export class TriggerImpl implements Trigger { triggerName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-startTrigger", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-startTrigger", options || {}); const 
operationArguments: coreHttp.OperationArguments = { triggerName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -439,10 +421,7 @@ export class TriggerImpl implements Trigger { triggerName: string, options?: coreHttp.OperationOptions ): Promise> { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-stopTrigger", - options || {} - ); + const { span, updatedOptions } = createSpan("ArtifactsClient-stopTrigger", options || {}); const operationArguments: coreHttp.OperationArguments = { triggerName, options: this.getOperationOptions(updatedOptions, "undefined") @@ -492,9 +471,7 @@ export class TriggerImpl implements Trigger { ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -567,11 +544,7 @@ const createOrUpdateTriggerOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.trigger, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.triggerName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts index c2c5b712b000..4c0e122b247a 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -13,10 +13,7 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClientContext } from "../artifactsClientContext"; -import { - RunFilterParameters, - 
TriggerRunQueryTriggerRunsByWorkspaceResponse -} from "../models"; +import { RunFilterParameters, TriggerRunQueryTriggerRunsByWorkspaceResponse } from "../models"; /** Class representing a TriggerRun. */ export class TriggerRunImpl implements TriggerRun { @@ -48,9 +45,7 @@ export class TriggerRunImpl implements TriggerRun { const operationArguments: coreHttp.OperationArguments = { triggerName, runId, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -87,9 +82,7 @@ export class TriggerRunImpl implements TriggerRun { const operationArguments: coreHttp.OperationArguments = { triggerName, runId, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -123,9 +116,7 @@ export class TriggerRunImpl implements TriggerRun { ); const operationArguments: coreHttp.OperationArguments = { filterParameters, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -157,11 +148,7 @@ const rerunTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.runId, - Parameters.triggerName - ], + urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], headerParameters: [Parameters.accept], serializer }; @@ -175,11 +162,7 @@ const cancelTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.runId, - Parameters.triggerName - ], + urlParameters: 
[Parameters.endpoint, Parameters.runId, Parameters.triggerName], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts index e659ffb31a03..687ad4376f23 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -31,23 +31,13 @@ export class WorkspaceImpl implements Workspace { * Get Workspace * @param options The options parameters. */ - async get( - options?: coreHttp.OperationOptions - ): Promise { - const { span, updatedOptions } = createSpan( - "ArtifactsClient-get", - options || {} - ); + async get(options?: coreHttp.OperationOptions): Promise { + const { span, updatedOptions } = createSpan("ArtifactsClient-get", options || {}); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); return result as WorkspaceGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts index 2e217d6470c3..40c76f21bef1 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -20,8 +20,7 @@ import { } from "../models"; /** Class representing a WorkspaceGitRepoManagement. 
*/ -export class WorkspaceGitRepoManagementImpl - implements WorkspaceGitRepoManagement { +export class WorkspaceGitRepoManagementImpl implements WorkspaceGitRepoManagement { private readonly client: ArtifactsClientContext; /** @@ -47,9 +46,7 @@ export class WorkspaceGitRepoManagementImpl ); const operationArguments: coreHttp.OperationArguments = { gitHubAccessTokenRequest, - options: coreHttp.operationOptionsToRequestOptionsBase( - updatedOptions || {} - ) + options: coreHttp.operationOptionsToRequestOptionsBase(updatedOptions || {}) }; try { const result = await this.client.sendOperationRequest( @@ -82,11 +79,7 @@ const getGitHubAccessTokenOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.gitHubAccessTokenRequest, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.clientRequestId - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.clientRequestId], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/integrationRuntimes.ts index 21abe2e6d7b0..e914c29353ff 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/integrationRuntimes.ts @@ -7,10 +7,7 @@ */ import * as coreHttp from "@azure/core-http"; -import { - IntegrationRuntimesListResponse, - IntegrationRuntimesGetResponse -} from "../models"; +import { IntegrationRuntimesListResponse, IntegrationRuntimesGetResponse } from "../models"; /** Interface representing a IntegrationRuntimes. */ export interface IntegrationRuntimes { @@ -18,9 +15,7 @@ export interface IntegrationRuntimes { * List Integration Runtimes * @param options The options parameters. 
*/ - list( - options?: coreHttp.OperationOptions - ): Promise; + list(options?: coreHttp.OperationOptions): Promise; /** * Get Integration Runtime * @param integrationRuntimeName The Integration Runtime name diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts index 51e4ba8d8dd6..ba7e83aa3805 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts @@ -6,7 +6,6 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ - import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreHttp from "@azure/core-http"; @@ -25,9 +24,7 @@ export interface Library { * Lists Library. * @param options The options parameters. */ - list( - options?: coreHttp.OperationOptions - ): PagedAsyncIterableIterator; + list(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; /** * Flush Library * @param libraryName file name to upload. Minimum length of the filename should be 1 excluding the @@ -63,10 +60,7 @@ export interface Library { * extension length. * @param options The options parameters. */ - get( - libraryName: string, - options?: coreHttp.OperationOptions - ): Promise; + get(libraryName: string, options?: coreHttp.OperationOptions): Promise; /** * Creates a library with the library name. * @param libraryName file name to upload. 
Minimum length of the filename should be 1 excluding the diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinition.ts index 9787084497f5..7672cbb59714 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinition.ts @@ -41,9 +41,7 @@ export interface SparkJobDefinition { sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams - ): Promise< - LROPoller - >; + ): Promise>; /** * Gets a Spark Job Definition. * @param sparkJobDefinitionName The spark job definition name. diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlPools.ts index 4f36e9842297..772fdb484488 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlPools.ts @@ -21,8 +21,5 @@ export interface SqlPools { * @param sqlPoolName The Sql Pool name * @param options The options parameters. 
*/ - get( - sqlPoolName: string, - options?: coreHttp.OperationOptions - ): Promise; + get(sqlPoolName: string, options?: coreHttp.OperationOptions): Promise; } diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRun.ts index da0789e055bb..f305649d3d9f 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRun.ts @@ -7,10 +7,7 @@ */ import * as coreHttp from "@azure/core-http"; -import { - RunFilterParameters, - TriggerRunQueryTriggerRunsByWorkspaceResponse -} from "../models"; +import { RunFilterParameters, TriggerRunQueryTriggerRunsByWorkspaceResponse } from "../models"; /** Interface representing a TriggerRun. */ export interface TriggerRun { From 622c1efb4de7515bb329b10939062d89e8048f51 Mon Sep 17 00:00:00 2001 From: Jose Manuel Heredia Hidalgo Date: Mon, 5 Apr 2021 18:18:54 +0000 Subject: [PATCH 3/6] Update changelog and constants file --- sdk/synapse/synapse-artifacts/CHANGELOG.md | 5 +++++ sdk/synapse/synapse-artifacts/package.json | 6 +++++- sdk/synapse/synapse-artifacts/src/utils/constants.ts | 9 +++++++++ sdk/synapse/synapse-artifacts/swagger/README.md | 2 +- 4 files changed, 20 insertions(+), 2 deletions(-) create mode 100644 sdk/synapse/synapse-artifacts/src/utils/constants.ts diff --git a/sdk/synapse/synapse-artifacts/CHANGELOG.md b/sdk/synapse/synapse-artifacts/CHANGELOG.md index 16eed68e7637..4f4013d9e849 100644 --- a/sdk/synapse/synapse-artifacts/CHANGELOG.md +++ b/sdk/synapse/synapse-artifacts/CHANGELOG.md @@ -1,5 +1,10 @@ # Release History +## 1.0.0-beta.4 (2021-04-06) + +- Adds ADF support +- Consume latest Code Generator changes + ## 1.0.0-beta.3 (2021-03-09) - Regenerated from the latest versions of REST API and Code Generator diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index c902c8c8c3cb..8a8259e3d9d0 
100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -3,7 +3,7 @@ "author": "Microsoft Corporation", "description": "A generated SDK for ArtifactsClient.", "sdk-type": "client", - "version": "1.0.0-beta.3", + "version": "1.0.0-beta.5", "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-artifacts/README.md", "repository": "github:Azure/azure-sdk-for-js", "dependencies": { @@ -83,6 +83,10 @@ { "path": "src/utils/constants.ts", "prefix": "SDK_VERSION" + }, + { + "path": "swagger/README.md", + "prefix": "pacage-version" } ] } diff --git a/sdk/synapse/synapse-artifacts/src/utils/constants.ts b/sdk/synapse/synapse-artifacts/src/utils/constants.ts new file mode 100644 index 000000000000..dbe3c7bc0fdc --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/utils/constants.ts @@ -0,0 +1,9 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ + +export const SDK_VERSION="1.0.0-beta.4" diff --git a/sdk/synapse/synapse-artifacts/swagger/README.md b/sdk/synapse/synapse-artifacts/swagger/README.md index e14b60de2ca5..98f67f997da4 100644 --- a/sdk/synapse/synapse-artifacts/swagger/README.md +++ b/sdk/synapse/synapse-artifacts/swagger/README.md @@ -20,7 +20,7 @@ modelerfour: batch: - package-artifacts: true package-name: "@azure/synapse-artifacts" - pacage-version: "1.0.0-beta.3" + pacage-version: "1.0.0-beta.4" add-credentials: true license-header: MICROSOFT_MIT_NO_VERSION credential-scopes: https://dev.azuresynapse.net/.default From 494d02ac4211138330851aa277670f0a251a6ee3 Mon Sep 17 00:00:00 2001 From: Jose Manuel Heredia Hidalgo Date: Mon, 5 Apr 2021 18:28:49 +0000 Subject: [PATCH 4/6] Format --- sdk/synapse/synapse-artifacts/src/utils/constants.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/synapse/synapse-artifacts/src/utils/constants.ts b/sdk/synapse/synapse-artifacts/src/utils/constants.ts index dbe3c7bc0fdc..4b88a59cc9d9 100644 --- a/sdk/synapse/synapse-artifacts/src/utils/constants.ts +++ b/sdk/synapse/synapse-artifacts/src/utils/constants.ts @@ -6,4 +6,4 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -export const SDK_VERSION="1.0.0-beta.4" +export const SDK_VERSION = "1.0.0-beta.4"; From 1a87b641a00dce21d216f5e6e9abd1fc5fcee0ee Mon Sep 17 00:00:00 2001 From: Jose Manuel Heredia Hidalgo Date: Mon, 5 Apr 2021 22:34:34 +0000 Subject: [PATCH 5/6] Add acceptance tests --- .../synapse-artifacts/api-extractor.json | 2 +- sdk/synapse/synapse-artifacts/karma.conf.js | 143 +++++++++ sdk/synapse/synapse-artifacts/package.json | 68 +++- ...ding_should_get_a_bigdatapool_by_name.json | 50 +++ .../recording_should_list_bigdatapools.json | 50 +++ .../recording_should_create_dataflow.json | 148 +++++++++ .../recording_should_delete_dataflow.json | 117 +++++++ .../recording_should_get_dataflow.json | 56 ++++ .../recording_should_list_dataflows.json | 50 +++ .../recording_should_rename_dataflow.json | 302 ++++++++++++++++++ ...ording_should_get_integrationruntimes.json | 56 ++++ ...rding_should_list_integrationruntimes.json | 56 ++++ .../recording_should_create_library.json | 142 ++++++++ .../recording_should_delete_library.json | 140 ++++++++ .../library/recording_should_get_library.json | 50 +++ .../recording_should_list_library.json | 50 +++ ...ording_should_get_a_bigdatapool_by_name.js | 54 ++++ .../recording_should_list_bigdatapools.js | 54 ++++ .../recording_should_create_dataflow.js | 186 +++++++++++ .../recording_should_delete_dataflow.js | 136 ++++++++ .../dataflow/recording_should_get_dataflow.js | 66 ++++ .../recording_should_list_dataflows.js | 54 ++++ .../recording_should_rename_dataflow.js | 232 ++++++++++++++ ...ecording_should_get_integrationruntimes.js | 66 ++++ ...cording_should_list_integrationruntimes.js | 66 ++++ .../recording_should_create_library.js | 114 +++++++ .../recording_should_delete_library.js | 136 ++++++++ .../library/recording_should_get_library.js | 54 ++++ .../library/recording_should_list_library.js | 54 ++++ .../synapse-artifacts/rollup.config.js | 43 +-- sdk/synapse/synapse-artifacts/sample.env | 11 + 
sdk/synapse/synapse-artifacts/src/index.ts | 2 + .../src/operations/dataFlow.ts | 3 +- .../src/operations/dataFlowDebugSession.ts | 2 +- .../src/operations/dataset.ts | 3 +- .../src/operations/library.ts | 3 +- .../src/operations/linkedService.ts | 2 +- .../src/operations/notebook.ts | 2 +- .../src/operations/pipeline.ts | 2 +- .../src/operations/sparkJobDefinition.ts | 2 +- .../src/operations/sqlScript.ts | 2 +- .../src/operations/trigger.ts | 2 +- .../test/public/bigDataPools.spec.ts | 29 ++ .../test/public/dataFlows.spec.ts | 63 ++++ .../test/public/integrationRuntimes.spec.ts | 32 ++ .../test/public/library.spec.ts | 47 +++ .../test/public/utils/env.browser.ts | 2 + .../test/public/utils/env.ts | 6 + .../test/public/utils/recordedClient.ts | 63 ++++ sdk/synapse/synapse-artifacts/tsconfig.json | 19 +- 50 files changed, 3008 insertions(+), 84 deletions(-) create mode 100644 sdk/synapse/synapse-artifacts/karma.conf.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_get_a_bigdatapool_by_name.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_list_bigdatapools.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_create_dataflow.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_delete_dataflow.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_get_dataflow.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_list_dataflows.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_rename_dataflow.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_get_integrationruntimes.json create mode 100644 
sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_list_integrationruntimes.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_create_library.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_delete_library.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_get_library.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_list_library.json create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/bigdatapools/recording_should_get_a_bigdatapool_by_name.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/bigdatapools/recording_should_list_bigdatapools.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_create_dataflow.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_delete_dataflow.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_get_dataflow.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_list_dataflows.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_rename_dataflow.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/integrationruntimes/recording_should_get_integrationruntimes.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/integrationruntimes/recording_should_list_integrationruntimes.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_create_library.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_delete_library.js create mode 100644 sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_get_library.js create mode 100644 
sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_list_library.js create mode 100644 sdk/synapse/synapse-artifacts/sample.env create mode 100644 sdk/synapse/synapse-artifacts/test/public/bigDataPools.spec.ts create mode 100644 sdk/synapse/synapse-artifacts/test/public/dataFlows.spec.ts create mode 100644 sdk/synapse/synapse-artifacts/test/public/integrationRuntimes.spec.ts create mode 100644 sdk/synapse/synapse-artifacts/test/public/library.spec.ts create mode 100644 sdk/synapse/synapse-artifacts/test/public/utils/env.browser.ts create mode 100644 sdk/synapse/synapse-artifacts/test/public/utils/env.ts create mode 100644 sdk/synapse/synapse-artifacts/test/public/utils/recordedClient.ts diff --git a/sdk/synapse/synapse-artifacts/api-extractor.json b/sdk/synapse/synapse-artifacts/api-extractor.json index a5982c5913e1..ac5e56848f19 100644 --- a/sdk/synapse/synapse-artifacts/api-extractor.json +++ b/sdk/synapse/synapse-artifacts/api-extractor.json @@ -1,6 +1,6 @@ { "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", - "mainEntryPointFilePath": "./dist-esm/index.d.ts", + "mainEntryPointFilePath": "types/src/index.d.ts", "docModel": { "enabled": true }, "apiReport": { "enabled": true, "reportFolder": "./review" }, "dtsRollup": { diff --git a/sdk/synapse/synapse-artifacts/karma.conf.js b/sdk/synapse/synapse-artifacts/karma.conf.js new file mode 100644 index 000000000000..6802c8b9d0d7 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/karma.conf.js @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// https://github.com/karma-runner/karma-chrome-launcher +process.env.CHROME_BIN = require("puppeteer").executablePath(); +require("dotenv").config(); +const { + jsonRecordingFilterFunction, + isPlaybackMode, + isSoftRecordMode, + isRecordMode +} = require("@azure/test-utils-recorder"); + +module.exports = function(config) { + config.set({ + // base path that will be used to resolve all patterns (eg. files, exclude) + basePath: "./", + + // frameworks to use + // available frameworks: https://npmjs.org/browse/keyword/karma-adapter + frameworks: ["source-map-support", "mocha"], + + plugins: [ + "karma-mocha", + "karma-mocha-reporter", + "karma-chrome-launcher", + "karma-edge-launcher", + "karma-firefox-launcher", + "karma-ie-launcher", + "karma-env-preprocessor", + "karma-coverage", + "karma-sourcemap-loader", + "karma-junit-reporter", + "karma-json-to-file-reporter", + "karma-source-map-support", + "karma-json-preprocessor" + ], + + // list of files / patterns to load in the browser + files: [ + "dist-test/index.browser.js", + { pattern: "dist-test/index.browser.js.map", type: "html", included: false, served: true } + ].concat(isPlaybackMode() || isSoftRecordMode() ? ["recordings/browsers/**/*.json"] : []), + + // list of files / patterns to exclude + exclude: [], + + // preprocess matching files before serving them to the browser + // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor + preprocessors: { + "**/*.js": ["sourcemap", "env"], + "recordings/browsers/**/*.json": ["json"] + // IMPORTANT: COMMENT following line if you want to debug in your browsers!! 
+ // Preprocess source file to calculate code coverage, however this will make source file unreadable + // "dist-test/index.js": ["coverage"] + }, + + envPreprocessor: [ + "TEST_MODE", + "ENDPOINT", + "AZURE_CLIENT_ID", + "AZURE_CLIENT_SECRET", + "AZURE_TENANT_ID" + ], + + // test results reporter to use + // possible values: 'dots', 'progress' + // available reporters: https://npmjs.org/browse/keyword/karma-reporter + reporters: ["mocha", "coverage", "junit", "json-to-file"], + + coverageReporter: { + // specify a common output directory + dir: "coverage-browser/", + reporters: [ + { type: "json", subdir: ".", file: "coverage.json" }, + { type: "lcovonly", subdir: ".", file: "lcov.info" }, + { type: "html", subdir: "html" }, + { type: "cobertura", subdir: ".", file: "cobertura-coverage.xml" } + ] + }, + + junitReporter: { + outputDir: "", // results will be saved as $outputDir/$browserName.xml + outputFile: "test-results.browser.xml", // if included, results will be saved as $outputDir/$browserName/$outputFile + suite: "", // suite will become the package name attribute in xml testsuite element + useBrowserName: false, // add browser name to report and classes names + nameFormatter: undefined, // function (browser, result) to customize the name attribute in xml testcase element + classNameFormatter: undefined, // function (browser, result) to customize the classname attribute in xml testcase element + properties: {} // key value pair of properties to add to the section of the report + }, + + jsonToFileReporter: { + filter: jsonRecordingFilterFunction, + outputPath: "." 
+ }, + + // web server port + port: 9876, + + // enable / disable colors in the output (reporters and logs) + colors: true, + + // level of logging + // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG + logLevel: config.LOG_INFO, + + // enable / disable watching file and executing tests whenever any file changes + autoWatch: false, + + // --no-sandbox allows our tests to run in Linux without having to change the system. + // --disable-web-security allows us to authenticate from the browser without having to write tests using interactive auth, which would be far more complex. + browsers: ["ChromeHeadlessNoSandbox"], + customLaunchers: { + ChromeHeadlessNoSandbox: { + base: "ChromeHeadless", + flags: ["--no-sandbox", "--disable-web-security"] + } + }, + + // Continuous Integration mode + // if true, Karma captures browsers, runs the tests and exits + singleRun: false, + + // Concurrency level + // how many browser should be started simultaneous + concurrency: 1, + + browserNoActivityTimeout: 60000000, + browserDisconnectTimeout: 10000, + browserDisconnectTolerance: 3, + browserConsoleLogOptions: { + terminal: !isRecordMode() + }, + + client: { + mocha: { + // change Karma's debug.html to the mocha web reporter + reporter: "html", + timeout: 0 + } + } + }); +}; diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index 8a8259e3d9d0..11a02a72ea81 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -3,7 +3,7 @@ "author": "Microsoft Corporation", "description": "A generated SDK for ArtifactsClient.", "sdk-type": "client", - "version": "1.0.0-beta.5", + "version": "1.0.0-beta.4", "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-artifacts/README.md", "repository": "github:Azure/azure-sdk-for-js", "dependencies": { @@ -13,6 +13,9 @@ "@azure/core-tracing": 
"1.0.0-preview.11", "tslib": "^2.0.0" }, + "browser": { + "./dist-esm/test/public/utils/env.js": "./dist-esm/test/public/utils/env.browser.js" + }, "keywords": [ "node", "azure", @@ -32,17 +35,49 @@ "module": "./dist-esm/src/index.js", "types": "./types/synapse-artifacts.d.ts", "devDependencies": { - "typescript": "~4.2.0", - "eslint": "^7.15.0", - "@azure/eslint-plugin-azure-sdk": "^3.0.0", "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure/identity": "^1.1.0", + "@azure/test-utils-recorder": "^1.0.0", + "@microsoft/api-extractor": "7.7.11", + "@rollup/plugin-commonjs": "11.0.2", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "chai": "^4.2.0", + "dotenv": "^8.2.0", + "eslint": "^7.15.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-mocha": "^2.0.1", + "karma-sourcemap-loader": "^0.3.8", + "karma": "^6.2.0", + "mocha-junit-reporter": "^1.18.0", + "mocha": "^7.1.1", + "nyc": "^14.0.0", + "prettier": "^1.16.4", "rimraf": "^3.0.0", - "rollup": "^1.16.3", "rollup-plugin-node-resolve": "^3.4.0", "rollup-plugin-sourcemaps": "^0.4.2", - "@rollup/plugin-commonjs": "11.0.2", + "rollup": "^1.16.3", "uglify-js": "^3.4.9", - "@microsoft/api-extractor": "7.7.11", + "@types/chai-as-promised": "^7.1.0", + "@types/node": "^8.0.0", + "@types/sinon": "^9.0.4", + "chai-as-promised": "^7.1.1", + "cross-env": "^7.0.2", + "karma-edge-launcher": "^0.4.2", + "karma-json-preprocessor": "^0.3.3", + "karma-json-to-file-reporter": "^1.0.1", + "karma-source-map-support": "~1.4.0", + "sinon": "^9.0.2", + "source-map-support": "^0.5.9", + "ts-node": "^9.0.0", + "typescript": "~4.2.0", "typedoc": "0.15.2" }, "bugs": { @@ -50,26 +85,31 @@ }, "files": [ "dist/", - "dist-esm/", + "dist-esm/src/", "types/synapse-artifacts.d.ts", "README.md", 
"LICENSE" ], "scripts": { - "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", + "build:browser": "tsc -p . && cross-env ONLY_BROWSER=true rollup -c 2>&1", + "build:node": "tsc -p . && cross-env ONLY_NODE=true rollup -c 2>&1", + "build:samples": "echo Obsolete.", + "build:test": "tsc -p . && rollup -c 2>&1", + "build": "tsc -p . && rollup -c 2>&1 && api-extractor run --local", + "build:debug": "tsc -p . && rollup -c 2>&1 && api-extractor run --local", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", "pack": "npm pack 2>&1", - "build:test": "echo skip", "lint": "echo skipped", "format": "echo skip format as this package has only auto generated code", "check-format": "echo skip format check as this package has only auto generated code", - "test": "echo skip", - "unit-test:browser": "echo skipped", - "unit-test:node": "echo skipped", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser", + "test:node": "npm run clean && npm run build:test && npm run unit-test:node", + "test": "npm run clean && npm run build:test && npm run unit-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm --require ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace \"test/{,!(browser)/**/}*.spec.ts\"", "unit-test": "npm run unit-test:node && npm run unit-test:browser", "extract-api": "api-extractor run --local", "clean": "rimraf dist dist-browser dist-esm test-dist temp types *.tgz *.log", - "build:samples": "echo Skipped.", "docs": "typedoc --excludePrivate --excludeNotExported --excludeExternals --stripInternal --mode file --out ./dist/docs ./src" }, "sideEffects": false, diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_get_a_bigdatapool_by_name.json 
b/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_get_a_bigdatapool_by_name.json new file mode 100644 index 000000000000..91da896c62ea --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_get_a_bigdatapool_by_name.json @@ -0,0 +1,50 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:15:59 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - NCUS ProdSlices", + "x-ms-request-id": "e955ba6f-e596-499b-be82-5c8cd2206a00" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/bigDataPools/testsparkpool", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": 
"{\"properties\":{\"creationDate\":\"2020-11-18T19:10:21.6Z\",\"sparkVersion\":\"2.4\",\"nodeCount\":10,\"nodeSize\":\"Small\",\"nodeSizeFamily\":\"MemoryOptimized\",\"autoScale\":{\"enabled\":true,\"minNodeCount\":3,\"maxNodeCount\":10},\"autoPause\":{\"enabled\":true,\"delayInMinutes\":15},\"isComputeIsolationEnabled\":false,\"sessionLevelPackagesEnabled\":false,\"cacheSize\":0,\"dynamicExecutorAllocation\":{\"enabled\":false},\"lastSucceededTimestamp\":\"2020-11-18T19:10:25.52Z\",\"provisioningState\":\"Succeeded\"},\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.ProjectArcadia/workspaces/xysynapsetest/sparkComputes/testsparkpool\",\"name\":\"testsparkpool\",\"type\":\"Microsoft.ProjectArcadia/workspaces/sparkComputes\",\"location\":\"westus2\",\"tags\":{}}", + "responseHeaders": { + "content-length": "756", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:15:59 GMT", + "server": "Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "45086e38-941f-4345-b272-28fac8c01f16" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "287c718d64e94115faa2f2c283e3d913" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_list_bigdatapools.json b/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_list_bigdatapools.json new file mode 100644 index 000000000000..737c4b1f2079 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_list_bigdatapools.json @@ -0,0 +1,50 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": 
"response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:15:59 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - SCUS ProdSlices", + "x-ms-request-id": "1c106eb6-c49c-4606-bd61-888571656a00" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/bigDataPools", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": 
"{\"value\":[{\"properties\":{\"creationDate\":\"2020-11-19T21:41:05.3233333Z\",\"sparkVersion\":\"2.4\",\"nodeCount\":4,\"nodeSize\":\"Small\",\"nodeSizeFamily\":\"MemoryOptimized\",\"autoScale\":{\"enabled\":false,\"minNodeCount\":3,\"maxNodeCount\":10},\"autoPause\":{\"enabled\":true,\"delayInMinutes\":15},\"isComputeIsolationEnabled\":false,\"sessionLevelPackagesEnabled\":false,\"cacheSize\":0,\"dynamicExecutorAllocation\":{\"enabled\":false},\"lastSucceededTimestamp\":\"2020-11-19T21:41:13.2833333Z\",\"provisioningState\":\"Succeeded\"},\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.ProjectArcadia/workspaces/xysynapsetest/sparkComputes/HamonsSpark\",\"name\":\"HamonsSpark\",\"type\":\"Microsoft.ProjectArcadia/workspaces/sparkComputes\",\"location\":\"westus2\",\"tags\":{}},{\"properties\":{\"creationDate\":\"2021-01-11T06:04:11.4733333Z\",\"sparkVersion\":\"2.4\",\"nodeCount\":0,\"nodeSize\":\"Small\",\"nodeSizeFamily\":\"MemoryOptimized\",\"autoScale\":{\"enabled\":true,\"minNodeCount\":3,\"maxNodeCount\":10},\"autoPause\":{\"enabled\":true,\"delayInMinutes\":15},\"isComputeIsolationEnabled\":false,\"sessionLevelPackagesEnabled\":false,\"cacheSize\":0,\"dynamicExecutorAllocation\":{\"enabled\":false},\"lastSucceededTimestamp\":\"2021-01-11T06:04:18.1666667Z\",\"provisioningState\":\"Succeeded\"},\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.ProjectArcadia/workspaces/xysynapsetest/sparkComputes/jianghaospool\",\"name\":\"jianghaospool\",\"type\":\"Microsoft.ProjectArcadia/workspaces/sparkComputes\",\"location\":\"westus2\",\"tags\":{}},{\"properties\":{\"creationDate\":\"2020-11-18T19:10:21.6Z\",\"sparkVersion\":\"2.4\",\"nodeCount\":10,\"nodeSize\":\"Small\",\"nodeSizeFamily\":\"MemoryOptimized\",\"autoScale\":{\"enabled\":true,\"minNodeCount\":3,\"maxNodeCount\":10},\"autoPause\":{\"enabled\":true,\"delayInMinutes\":15},\"isComputeIsolationEnab
led\":false,\"sessionLevelPackagesEnabled\":false,\"cacheSize\":0,\"dynamicExecutorAllocation\":{\"enabled\":false},\"lastSucceededTimestamp\":\"2020-11-18T19:10:25.52Z\",\"provisioningState\":\"Succeeded\"},\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.ProjectArcadia/workspaces/xysynapsetest/sparkComputes/testsparkpool\",\"name\":\"testsparkpool\",\"type\":\"Microsoft.ProjectArcadia/workspaces/sparkComputes\",\"location\":\"westus2\",\"tags\":{}}]}", + "responseHeaders": { + "content-length": "2299", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:15:59 GMT", + "server": "Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "4c7defe3-74c1-4888-8308-868a5ed4710c" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "a338136bc3147a1576b8d512fcadaa55" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_create_dataflow.json b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_create_dataflow.json new file mode 100644 index 000000000000..002295571f60 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_create_dataflow.json @@ -0,0 +1,148 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": 
"application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:15:59 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - SCUS ProdSlices", + "x-ms-request-id": "7245599c-dde0-4068-b6a2-cc47abb37900" + } + }, + { + "method": "PUT", + "url": "https://testaccount.dev.azuresynapse.net/dataflows/testdataflow", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": "{\"properties\":{\"type\":\"MappingDataFlow\"}}", + "status": 202, + "response": "{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow\",\"recordId\":1439121,\"state\":\"Creating\",\"created\":\"2021-04-05T22:16:00.0166667Z\",\"changed\":\"2021-04-05T22:16:00.0166667Z\",\"type\":\"DataFlow\",\"name\":\"testdataflow\",\"operationId\":\"9732ad0f-2d87-49c4-b533-5a916627f20e\",\"artifactId\":\"E1254504-E8DA-4D96-AF88-927331B1DB45\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "425", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:15:59 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/9732ad0f-2d87-49c4-b533-5a916627f20e?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "ec63eeb4-8acc-41c0-979a-2582513a99e1" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/9732ad0f-2d87-49c4-b533-5a916627f20e", + "query": { + "api-version": "2019-06-01-preview" + }, + 
"requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:15:59 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/9732ad0f-2d87-49c4-b533-5a916627f20e?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "c055ce21-c118-404d-94d8-7a139921467b" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/9732ad0f-2d87-49c4-b533-5a916627f20e", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:01 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/9732ad0f-2d87-49c4-b533-5a916627f20e?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "94fc097f-e90a-4728-bcbd-b8fae69936ed" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/9732ad0f-2d87-49c4-b533-5a916627f20e", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": 
"Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:04 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/9732ad0f-2d87-49c4-b533-5a916627f20e?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "bf902ef9-eefa-4bc6-ab66-8500e54b538e" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/9732ad0f-2d87-49c4-b533-5a916627f20e", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": "{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow\",\"name\":\"testdataflow\",\"type\":\"Microsoft.Synapse/workspaces/dataflows\",\"properties\":{\"type\":\"MappingDataFlow\"},\"etag\":\"bb067ce2-0000-0800-0000-606b8c240000\"}", + "responseHeaders": { + "cache-control": "no-cache", + "content-length": "316", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:06 GMT", + "expires": "-1", + "pragma": "no-cache", + "server": "Microsoft-IIS/10.0 Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=15724800; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-correlation-request-id": "f0fdb02f-493d-4652-b83f-9ada14b96ddc", + "x-ms-request-id": "b8b46e8b-92ab-4371-82c6-be585788f1b5", + "x-powered-by": "ASP.NET" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "e82c5fd471cc798c2bf3ace653188673" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_delete_dataflow.json 
b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_delete_dataflow.json new file mode 100644 index 000000000000..4fc0d06f4d4d --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_delete_dataflow.json @@ -0,0 +1,117 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:34 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - NCUS ProdSlices", + "x-ms-request-id": "d751fe13-4d70-45cf-b817-f5a599d46a00" + } + }, + { + "method": "DELETE", + "url": "https://testaccount.dev.azuresynapse.net/dataflows/testdataflow2", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow2\",\"recordId\":0,\"state\":\"Deleting\",\"created\":\"0001-01-01T00:00:00\",\"changed\":\"0001-01-01T00:00:00\",\"type\":\"DataFlow\",\"name\":\"testdataflow2\",\"operationId\":\"4493c698-e165-48a1-be3c-07bb09f47014\"}", + "responseHeaders": { + "access-control-allow-headers": 
"Location", + "access-control-expose-headers": "Location", + "content-length": "351", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:35 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/4493c698-e165-48a1-be3c-07bb09f47014?api-version=2019-06-01-preview", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "e7ae3aee-b8c4-4660-acd3-610d7d457d8f" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/4493c698-e165-48a1-be3c-07bb09f47014", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:35 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/4493c698-e165-48a1-be3c-07bb09f47014?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "7f01593b-91b8-4bb5-8381-f7f31d3d70c7" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/4493c698-e165-48a1-be3c-07bb09f47014", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:36 GMT", + "location": 
"https://testaccount.dev.azuresynapse.net/operationResults/4493c698-e165-48a1-be3c-07bb09f47014?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "7fdd6133-8479-4eac-aa2c-27b29f1eaf74" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/4493c698-e165-48a1-be3c-07bb09f47014", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": "", + "responseHeaders": { + "content-length": "0", + "date": "Mon, 05 Apr 2021 22:16:39 GMT", + "server": "Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "0c37f76c-0117-4b53-81e7-87bd84185f13" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "7b4bb372d35391412ee5e8285d203962" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_get_dataflow.json b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_get_dataflow.json new file mode 100644 index 000000000000..e2f4cf8fc5df --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_get_dataflow.json @@ -0,0 +1,56 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + 
"content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:10 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - EUS ProdSlices", + "x-ms-request-id": "7b3097c4-30b7-4190-85ae-fdcd38306c00" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/dataflows/testdataflow", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": "{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow\",\"name\":\"testdataflow\",\"type\":\"Microsoft.Synapse/workspaces/dataflows\",\"properties\":{\"type\":\"MappingDataFlow\"},\"etag\":\"bb067ce2-0000-0800-0000-606b8c240000\"}", + "responseHeaders": { + "cache-control": "no-cache", + "content-length": "316", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:10 GMT", + "expires": "-1", + "pragma": "no-cache", + "server": "Microsoft-IIS/10.0 Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=15724800; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-correlation-request-id": "223c9a1d-7d3c-4191-964d-0671dd30e427", + "x-ms-request-id": "321a6c92-0013-40a5-9a85-8cf53e6965f3", + "x-powered-by": "ASP.NET" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "ae75ee88a3c419458ca442bc2cfc8f76" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_list_dataflows.json b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_list_dataflows.json new file mode 100644 index 
000000000000..f092d529789e --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_list_dataflows.json @@ -0,0 +1,50 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:10 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - SCUS ProdSlices", + "x-ms-request-id": "1c106eb6-c49c-4606-bd61-8885e2666a00" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/dataflows", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": "{\"value\":[{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow\",\"name\":\"testdataflow\",\"type\":\"Microsoft.Synapse/workspaces/dataflows\",\"etag\":\"bb067ce2-0000-0800-0000-606b8c240000\",\"properties\":{\"type\":\"MappingDataFlow\"}}]}", + "responseHeaders": { + "content-length": "328", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:10 GMT", + "server": "Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": 
"max-age=31536000; includeSubDomains", + "x-ms-request-id": "6ed26ba2-2b73-487e-933a-49cb53772c44" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "90ec8ab1ad4af8fa149a0615d4355c93" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_rename_dataflow.json b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_rename_dataflow.json new file mode 100644 index 000000000000..d8615ad05e49 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_rename_dataflow.json @@ -0,0 +1,302 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:10 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - EUS ProdSlices", + "x-ms-request-id": "817723d6-1568-4d45-99b2-5a8f125a6a00" + } + }, + { + "method": "POST", + "url": "https://testaccount.dev.azuresynapse.net/dataflows/testdataflow/rename", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": "{\"newName\":\"testdataflow2\"}", + "status": 202, + "response": 
"{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow\",\"recordId\":1439121,\"state\":\"Renaming\",\"created\":\"2021-04-05T22:16:00.0166667Z\",\"changed\":\"2021-04-05T22:16:04.76Z\",\"type\":\"DataFlow\",\"name\":\"testdataflow\",\"operationId\":\"372db187-2845-47e9-b1db-d1bd9614a7df\",\"artifactId\":\"E1254504-E8DA-4D96-AF88-927331B1DB45\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "420", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:10 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "cc7d54d3-6410-4362-9335-c88eab0c49e0" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:10 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "39a898d8-e80a-4721-84a3-065001f4b727" + } + }, + { + "method": 
"GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:12 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "33a59889-67c7-42af-8733-e8229178e657" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:14 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "493d28e9-f047-4e3b-adc6-408e8409c9a4" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 
202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:16 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "e593dc3a-0502-42ff-b96b-8209fc9ab368" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:18 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "767b7bc8-3192-4e99-b558-c39ef61a7777" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + 
"content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:20 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "b0919bec-2b3f-4830-84a2-3ccbd1f3ced9" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:22 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "f79d8e9b-93a3-4cde-a063-6397f9ac760d" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:24 GMT", + "location": 
"https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "b5076dee-9787-4a7d-8d0e-43189e8d2cd6" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:26 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "96f96fea-da29-404f-9b61-75fe484eee05" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:28 GMT", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", 
+ "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "8b3447ae-11bc-4adc-8e57-4a2eefc60aed" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/operationResults/372db187-2845-47e9-b1db-d1bd9614a7df", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": "", + "responseHeaders": { + "content-length": "0", + "date": "Mon, 05 Apr 2021 22:16:31 GMT", + "server": "Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "83104349-c61a-4a51-beca-9fadc0c308c9" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "9ada8a136727f20f7ffb334d7f10cad1" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_get_integrationruntimes.json b/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_get_integrationruntimes.json new file mode 100644 index 000000000000..32d5f4828ec6 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_get_integrationruntimes.json @@ -0,0 +1,56 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:43 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL 
FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - SCUS ProdSlices", + "x-ms-request-id": "7245599c-dde0-4068-b6a2-cc47c0b97900" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/integrationRuntimes/AutoResolveIntegrationRuntime", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": "{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/integrationruntimes/AutoResolveIntegrationRuntime\",\"name\":\"AutoResolveIntegrationRuntime\",\"type\":\"Microsoft.Synapse/workspaces/integrationruntimes\",\"properties\":{\"type\":\"Managed\",\"typeProperties\":{\"computeProperties\":{\"location\":\"AutoResolve\"}},\"provisioningState\":null},\"etag\":\"3801cded-0000-0800-0000-5fa34e1b0000\"}", + "responseHeaders": { + "cache-control": "no-cache", + "content-length": "453", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:43 GMT", + "expires": "-1", + "pragma": "no-cache", + "server": "Microsoft-IIS/10.0 Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=15724800; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-correlation-request-id": "ef687d66-9871-4c0b-a67a-16d09e58659b", + "x-ms-request-id": "54b27f78-5641-4a3d-ac24-e88eac1b4031", + "x-powered-by": "ASP.NET" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "ca53856833fe39b942e7b539fc5015b4" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_list_integrationruntimes.json 
b/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_list_integrationruntimes.json new file mode 100644 index 000000000000..2df15395f7aa --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_list_integrationruntimes.json @@ -0,0 +1,56 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:43 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - EUS ProdSlices", + "x-ms-request-id": "7b3097c4-30b7-4190-85ae-fdcde1346c00" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/integrationRuntimes", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": 
"{\"value\":[{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/integrationruntimes/AutoResolveIntegrationRuntime\",\"name\":\"AutoResolveIntegrationRuntime\",\"type\":\"Microsoft.Synapse/workspaces/integrationruntimes\",\"properties\":{\"type\":\"Managed\",\"typeProperties\":{\"computeProperties\":{\"location\":\"AutoResolve\"}},\"provisioningState\":null},\"etag\":\"3801cded-0000-0800-0000-5fa34e1b0000\"}]}", + "responseHeaders": { + "cache-control": "no-cache", + "content-length": "465", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:43 GMT", + "expires": "-1", + "pragma": "no-cache", + "server": "Microsoft-IIS/10.0 Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=15724800; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-correlation-request-id": "ad890f46-7427-4412-a1c6-fa9d1fe759d7", + "x-ms-request-id": "51e1c352-1c7b-4359-8252-d005931a02d5", + "x-powered-by": "ASP.NET" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "850f616e1a3e72798091cee09b92372f" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_create_library.json b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_create_library.json new file mode 100644 index 000000000000..2b74f2b231b9 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_create_library.json @@ -0,0 +1,142 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": 
"{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:43 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - SCUS ProdSlices", + "x-ms-request-id": "1c106eb6-c49c-4606-bd61-88851a6b6a00" + } + }, + { + "method": "PUT", + "url": "https://testaccount.dev.azuresynapse.net/libraries/testLibraryName.jar", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/libraries/testLibraryName.jar\",\"recordId\":1439124,\"state\":\"Creating\",\"created\":\"2021-04-05T22:16:43.7433333Z\",\"changed\":\"2021-04-05T22:16:43.7433333Z\",\"type\":\"LibraryArtifact\",\"name\":\"testLibraryName.jar\",\"operationId\":\"bd86e054-3ad1-4db3-98e9-f5005f4d967a\",\"artifactId\":\"222B843C-6813-462B-A01A-D3AABE1428D5\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "446", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:43 GMT", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/bd86e054-3ad1-4db3-98e9-f5005f4d967a?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": 
"ac3f017a-8eff-45d5-93ad-85f3dac5db6d" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/bd86e054-3ad1-4db3-98e9-f5005f4d967a", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:43 GMT", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/bd86e054-3ad1-4db3-98e9-f5005f4d967a?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "29e0a7af-ac1e-4ea7-9620-be5d626d187f" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/bd86e054-3ad1-4db3-98e9-f5005f4d967a", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:45 GMT", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/bd86e054-3ad1-4db3-98e9-f5005f4d967a?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "283e2218-8f42-4d40-a132-6e2323ccf569" + } + }, + { + "method": "GET", + "url": 
"https://testaccount.dev.azuresynapse.net/libraryOperationResults/bd86e054-3ad1-4db3-98e9-f5005f4d967a", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:47 GMT", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/bd86e054-3ad1-4db3-98e9-f5005f4d967a?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "24987a83-ab8d-4bac-89ed-87c6d590b7e7" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/bd86e054-3ad1-4db3-98e9-f5005f4d967a", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": "{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/libraries/testLibraryName.jar\",\"name\":\"testLibraryName.jar\",\"type\":\"Microsoft.Synapse/workspaces/libraries\",\"properties\":{\"name\":\"testLibraryName.jar\",\"path\":\"xysynapsetest/libraries/testLibraryName.jar\",\"containerName\":\"prep\",\"uploadedTimestamp\":\"2021-04-05T22:16:43.7395879+00:00\",\"type\":\"jar\",\"provisioningStatus\":\"Incomplete\",\"creatorId\":\"30511c9d-ba1a-4c7b-b422-5b543da11b3f\"},\"etag\":\"bc063721-0000-0800-0000-606b8c4f0000\"}", + "responseHeaders": { + "content-length": "564", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:49 GMT", + "server": "Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=31536000; 
includeSubDomains", + "x-ms-request-id": "a4528c25-476e-4b26-ae3e-217eee097b6d" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "3e8ee7985bf7a35bb0232eec37f4e886" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_delete_library.json b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_delete_library.json new file mode 100644 index 000000000000..ba68d06e6d99 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_delete_library.json @@ -0,0 +1,140 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:54 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - NCUS ProdSlices", + "x-ms-request-id": "d751fe13-4d70-45cf-b817-f5a5ccd66a00" + } + }, + { + "method": "DELETE", + "url": "https://testaccount.dev.azuresynapse.net/libraries/testLibraryName.jar", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": 
"{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/libraries/testLibraryName.jar\",\"recordId\":0,\"state\":\"Deleting\",\"created\":\"0001-01-01T00:00:00\",\"changed\":\"0001-01-01T00:00:00\",\"type\":\"LibraryArtifact\",\"name\":\"testLibraryName.jar\",\"operationId\":\"b6303805-ecf9-4800-acb3-ddd6d99869b5\"}", + "responseHeaders": { + "access-control-allow-headers": "Location", + "access-control-expose-headers": "Location", + "content-length": "370", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:54 GMT", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/b6303805-ecf9-4800-acb3-ddd6d99869b5?api-version=2019-06-01-preview", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "ed94e616-3435-4f3c-8dea-261f933657d7" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/b6303805-ecf9-4800-acb3-ddd6d99869b5", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:54 GMT", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/b6303805-ecf9-4800-acb3-ddd6d99869b5?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "daaab812-1d9f-43e4-b8f3-a483f816a1c3" + } + }, + { + "method": "GET", + "url": 
"https://testaccount.dev.azuresynapse.net/libraryOperationResults/b6303805-ecf9-4800-acb3-ddd6d99869b5", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:56 GMT", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/b6303805-ecf9-4800-acb3-ddd6d99869b5?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "60725c3a-bf90-40a7-8a21-57a1ae4aaeb5" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/b6303805-ecf9-4800-acb3-ddd6d99869b5", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 202, + "response": "{\"status\":\"InProgress\"}", + "responseHeaders": { + "access-control-allow-headers": "Location, Retry-After", + "access-control-expose-headers": "Location, Retry-After", + "content-length": "23", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:58 GMT", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/b6303805-ecf9-4800-acb3-ddd6d99869b5?api-version=2019-06-01-preview", + "retry-after": "10", + "server": "Microsoft-HTTPAPI/2.0", + "status": "202", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "c0c132ab-630c-48a4-a496-0c52668f7dc6" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/b6303805-ecf9-4800-acb3-ddd6d99869b5", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": 
null, + "status": 200, + "response": "", + "responseHeaders": { + "content-length": "0", + "date": "Mon, 05 Apr 2021 22:17:00 GMT", + "server": "Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "381961a9-11fb-4079-be4e-840593846ed7" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "107a424bb707e86b132e6e85a069cdc2" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_get_library.json b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_get_library.json new file mode 100644 index 000000000000..b55ebd7f7583 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_get_library.json @@ -0,0 +1,50 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:54 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - NCUS ProdSlices", + "x-ms-request-id": "8ebd0574-c18b-43e2-bdf0-09683e436400" + } + }, + { + "method": "GET", + "url": 
"https://testaccount.dev.azuresynapse.net/libraries/testLibraryName.jar", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": "{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/libraries/testLibraryName.jar\",\"name\":\"testLibraryName.jar\",\"type\":\"Microsoft.Synapse/workspaces/libraries\",\"properties\":{\"name\":\"testLibraryName.jar\",\"path\":\"xysynapsetest/libraries/testLibraryName.jar\",\"containerName\":\"prep\",\"uploadedTimestamp\":\"2021-04-05T22:16:43.7395879+00:00\",\"type\":\"jar\",\"provisioningStatus\":\"Incomplete\",\"creatorId\":\"30511c9d-ba1a-4c7b-b422-5b543da11b3f\"},\"etag\":\"bc063721-0000-0800-0000-606b8c4f0000\"}", + "responseHeaders": { + "content-length": "564", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:54 GMT", + "server": "Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": "397c699b-0004-4d55-a887-4b930f7f5a7a" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "6e145c7219e1399d561c1f17054bdb7e" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_list_library.json b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_list_library.json new file mode 100644 index 000000000000..7b42d70bb8a4 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_list_library.json @@ -0,0 +1,50 @@ +{ + "recordings": [ + { + "method": "POST", + "url": "https://login.microsoftonline.com/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token", + "query": {}, + "requestBody": 
"response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default", + "status": 200, + "response": "{\"token_type\":\"Bearer\",\"expires_in\":86399,\"ext_expires_in\":86399,\"access_token\":\"access_token\"}", + "responseHeaders": { + "cache-control": "no-store, no-cache", + "content-length": "1322", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:53 GMT", + "expires": "-1", + "p3p": "CP=\"DSP CUR OTPi IND OTRi ONL FIN\"", + "pragma": "no-cache", + "referrer-policy": "strict-origin-when-cross-origin", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-content-type-options": "nosniff", + "x-ms-ests-server": "2.1.11562.10 - EUS ProdSlices", + "x-ms-request-id": "90037ff6-ca38-45ae-85a0-41baaab67400" + } + }, + { + "method": "GET", + "url": "https://testaccount.dev.azuresynapse.net/libraries", + "query": { + "api-version": "2019-06-01-preview" + }, + "requestBody": null, + "status": 200, + "response": "{\"value\":[{\"id\":\"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/libraries/testLibraryName.jar\",\"name\":\"testLibraryName.jar\",\"type\":\"Microsoft.Synapse/workspaces/libraries\",\"etag\":\"bc063721-0000-0800-0000-606b8c4f0000\",\"properties\":{\"name\":\"testLibraryName.jar\",\"path\":\"xysynapsetest/libraries/testLibraryName.jar\",\"containerName\":\"prep\",\"uploadedTimestamp\":\"2021-04-05T22:16:43.7395879+00:00\",\"type\":\"jar\",\"provisioningStatus\":\"Incomplete\",\"creatorId\":\"30511c9d-ba1a-4c7b-b422-5b543da11b3f\"}}]}", + "responseHeaders": { + "content-length": "576", + "content-type": "application/json; charset=utf-8", + "date": "Mon, 05 Apr 2021 22:16:54 GMT", + "server": "Microsoft-HTTPAPI/2.0", + "status": "200", + "strict-transport-security": "max-age=31536000; includeSubDomains", + "x-ms-request-id": 
"71782bb4-c24b-4498-b32e-c9c0d4ad5b37" + } + } + ], + "uniqueTestInfo": { + "uniqueName": {}, + "newDate": {} + }, + "hash": "f96e65c6ccfc8c2cc7518dd97f4757d9" +} \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/recordings/node/bigdatapools/recording_should_get_a_bigdatapool_by_name.js b/sdk/synapse/synapse-artifacts/recordings/node/bigdatapools/recording_should_get_a_bigdatapool_by_name.js new file mode 100644 index 000000000000..5d2758323b7e --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/bigdatapools/recording_should_get_a_bigdatapool_by_name.js @@ -0,0 +1,54 @@ +let nock = require('nock'); + +module.exports.hash = "af4564dbbb65281d3928bfc9543ed8f9"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + 'b92bec63-8e10-4e25-8e97-94b12f636f00', + 'x-ms-ests-server', + '2.1.11562.10 - NCUS ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWAgAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:09 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:08 GMT', + 'Content-Length', 
+ '1322' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/bigDataPools/testsparkpool') + .query(true) + .reply(200, {"properties":{"creationDate":"2020-11-18T19:10:21.6Z","sparkVersion":"2.4","nodeCount":10,"nodeSize":"Small","nodeSizeFamily":"MemoryOptimized","autoScale":{"enabled":true,"minNodeCount":3,"maxNodeCount":10},"autoPause":{"enabled":true,"delayInMinutes":15},"isComputeIsolationEnabled":false,"sessionLevelPackagesEnabled":false,"cacheSize":0,"dynamicExecutorAllocation":{"enabled":false},"lastSucceededTimestamp":"2020-11-18T19:10:25.52Z","provisioningState":"Succeeded"},"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.ProjectArcadia/workspaces/xysynapsetest/sparkComputes/testsparkpool","name":"testsparkpool","type":"Microsoft.ProjectArcadia/workspaces/sparkComputes","location":"westus2","tags":{}}, [ 'Content-Length', + '756', + 'Content-Type', + 'application/json; charset=utf-8', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'x-ms-request-id', + '2df4e86b-71f4-4fd1-be11-b8736792d35d', + 'Date', + 'Mon, 05 Apr 2021 21:27:08 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/bigdatapools/recording_should_list_bigdatapools.js b/sdk/synapse/synapse-artifacts/recordings/node/bigdatapools/recording_should_list_bigdatapools.js new file mode 100644 index 000000000000..934b4a8b53f2 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/bigdatapools/recording_should_list_bigdatapools.js @@ -0,0 +1,54 @@ +let nock = require('nock'); + +module.exports.hash = "55452eb4bf13a77d3b08cff7e23bd65e"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', 
"response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '1322', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + '83970648-3778-4a17-ab9b-6110b90a5900', + 'x-ms-ests-server', + '2.1.11562.10 - WUS2 ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWAQAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:08 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:07 GMT' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/bigDataPools') + .query(true) + .reply(200, 
{"value":[{"properties":{"creationDate":"2020-11-19T21:41:05.3233333Z","sparkVersion":"2.4","nodeCount":4,"nodeSize":"Small","nodeSizeFamily":"MemoryOptimized","autoScale":{"enabled":false,"minNodeCount":3,"maxNodeCount":10},"autoPause":{"enabled":true,"delayInMinutes":15},"isComputeIsolationEnabled":false,"sessionLevelPackagesEnabled":false,"cacheSize":0,"dynamicExecutorAllocation":{"enabled":false},"lastSucceededTimestamp":"2020-11-19T21:41:13.2833333Z","provisioningState":"Succeeded"},"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.ProjectArcadia/workspaces/xysynapsetest/sparkComputes/HamonsSpark","name":"HamonsSpark","type":"Microsoft.ProjectArcadia/workspaces/sparkComputes","location":"westus2","tags":{}},{"properties":{"creationDate":"2021-01-11T06:04:11.4733333Z","sparkVersion":"2.4","nodeCount":0,"nodeSize":"Small","nodeSizeFamily":"MemoryOptimized","autoScale":{"enabled":true,"minNodeCount":3,"maxNodeCount":10},"autoPause":{"enabled":true,"delayInMinutes":15},"isComputeIsolationEnabled":false,"sessionLevelPackagesEnabled":false,"cacheSize":0,"dynamicExecutorAllocation":{"enabled":false},"lastSucceededTimestamp":"2021-01-11T06:04:18.1666667Z","provisioningState":"Succeeded"},"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.ProjectArcadia/workspaces/xysynapsetest/sparkComputes/jianghaospool","name":"jianghaospool","type":"Microsoft.ProjectArcadia/workspaces/sparkComputes","location":"westus2","tags":{}},{"properties":{"creationDate":"2020-11-18T19:10:21.6Z","sparkVersion":"2.4","nodeCount":10,"nodeSize":"Small","nodeSizeFamily":"MemoryOptimized","autoScale":{"enabled":true,"minNodeCount":3,"maxNodeCount":10},"autoPause":{"enabled":true,"delayInMinutes":15},"isComputeIsolationEnabled":false,"sessionLevelPackagesEnabled":false,"cacheSize":0,"dynamicExecutorAllocation":{"enabled":false},"lastSucceededTimestamp":"2020-11-18T19:10:25.52Z","provisioningState":
"Succeeded"},"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.ProjectArcadia/workspaces/xysynapsetest/sparkComputes/testsparkpool","name":"testsparkpool","type":"Microsoft.ProjectArcadia/workspaces/sparkComputes","location":"westus2","tags":{}}]}, [ 'Content-Length', + '2299', + 'Content-Type', + 'application/json; charset=utf-8', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'x-ms-request-id', + 'eb4c3b10-fa24-4d38-99d4-c5becff0588b', + 'Date', + 'Mon, 05 Apr 2021 21:27:08 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_create_dataflow.js b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_create_dataflow.js new file mode 100644 index 000000000000..c6c1c4bb7336 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_create_dataflow.js @@ -0,0 +1,186 @@ +let nock = require('nock'); + +module.exports.hash = "1a294459aa6a96fcc6951d7a0ee0cd43"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '1322', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + 'c336d7a6-677b-4fdf-af1c-a0b193486500', + 
'x-ms-ests-server', + '2.1.11562.10 - SCUS ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWAwAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:09 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:08 GMT' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .put('/dataflows/testdataflow', {"properties":{"type":"MappingDataFlow"}}) + .query(true) + .reply(202, {"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow","recordId":1439004,"state":"Creating","created":"2021-04-05T21:27:09.59Z","changed":"2021-04-05T21:27:09.59Z","type":"DataFlow","name":"testdataflow","operationId":"b565df5a-56b5-4f22-8ad1-ee6c11e1fa91","artifactId":"0525C732-319A-4843-8F0A-CD7F6892620B"}, [ 'Content-Length', + '415', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/b565df5a-56b5-4f22-8ad1-ee6c11e1fa91?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '2d110dd4-85cd-42e8-9eaf-8368407b3bdd', + 'Date', + 'Mon, 05 Apr 2021 21:27:09 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/b565df5a-56b5-4f22-8ad1-ee6c11e1fa91') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 
'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/b565df5a-56b5-4f22-8ad1-ee6c11e1fa91?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '13519335-5589-441c-8c2e-6ba914bf0df7', + 'Date', + 'Mon, 05 Apr 2021 21:27:09 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/b565df5a-56b5-4f22-8ad1-ee6c11e1fa91') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/b565df5a-56b5-4f22-8ad1-ee6c11e1fa91?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '95b261f1-db23-4e15-b4f2-9594ab25ec6c', + 'Date', + 'Mon, 05 Apr 2021 21:27:11 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/b565df5a-56b5-4f22-8ad1-ee6c11e1fa91') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/b565df5a-56b5-4f22-8ad1-ee6c11e1fa91?api-version=2019-06-01-preview', + 
'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '6d0b2cd6-43bc-4a20-8ec2-7b03bf030b28', + 'Date', + 'Mon, 05 Apr 2021 21:27:13 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/b565df5a-56b5-4f22-8ad1-ee6c11e1fa91') + .query(true) + .reply(200, {"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow","name":"testdataflow","type":"Microsoft.Synapse/workspaces/dataflows","properties":{"type":"MappingDataFlow"},"etag":"aa0690f6-0000-0800-0000-606b80b20000"}, [ 'Cache-Control', + 'no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '316', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Server', + 'Microsoft-IIS/10.0 Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=15724800; includeSubDomains', + 'x-ms-correlation-request-id', + '9689e57a-27fe-485d-8d48-00c81e9587f4', + 'X-Content-Type-Options', + 'nosniff', + 'X-Powered-By', + 'ASP.NET', + 'x-ms-request-id', + '45e66c5a-454f-4b4f-ba9e-e4701d2d585b', + 'Date', + 'Mon, 05 Apr 2021 21:27:15 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_delete_dataflow.js b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_delete_dataflow.js new file mode 100644 index 000000000000..abd21d5a7845 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_delete_dataflow.js @@ -0,0 +1,136 @@ +let nock = require('nock'); + +module.exports.hash = 
"490333286e74fcd776ada6c8bff08130"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '1322', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + 'a4433f94-db9a-4a15-9895-e4714d926700', + 'x-ms-ests-server', + '2.1.11562.10 - NCUS ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWBgAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:35 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:34 GMT' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .delete('/dataflows/testdataflow2') + .query(true) + .reply(202, {"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow2","recordId":0,"state":"Deleting","created":"0001-01-01T00:00:00","changed":"0001-01-01T00:00:00","type":"DataFlow","name":"testdataflow2","operationId":"058bdd6f-6c2e-4737-8ebd-b68e0e433c99"}, [ 'Content-Length', + '351', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 
'https://testaccount.dev.azuresynapse.net/operationResults/058bdd6f-6c2e-4737-8ebd-b68e0e433c99?api-version=2019-06-01-preview', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Location', + 'x-ms-request-id', + '8ae95c00-0f0e-49cb-9be4-30c42227c7e6', + 'Date', + 'Mon, 05 Apr 2021 21:27:35 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/058bdd6f-6c2e-4737-8ebd-b68e0e433c99') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/058bdd6f-6c2e-4737-8ebd-b68e0e433c99?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + 'adb92c04-4973-4865-8fa7-814f7398ebe8', + 'Date', + 'Mon, 05 Apr 2021 21:27:34 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/058bdd6f-6c2e-4737-8ebd-b68e0e433c99') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/058bdd6f-6c2e-4737-8ebd-b68e0e433c99?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 
'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '42c9aa45-77d1-450d-9e6a-6bfa00abd71b', + 'Date', + 'Mon, 05 Apr 2021 21:27:37 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/058bdd6f-6c2e-4737-8ebd-b68e0e433c99') + .query(true) + .reply(200, "", [ 'Content-Length', + '0', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'x-ms-request-id', + '7b6eca15-ef52-4cbe-a0e0-95a136201b44', + 'Date', + 'Mon, 05 Apr 2021 21:27:39 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_get_dataflow.js b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_get_dataflow.js new file mode 100644 index 000000000000..9d6689fec58f --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_get_dataflow.js @@ -0,0 +1,66 @@ +let nock = require('nock'); + +module.exports.hash = "a48efda58194d1a48887a8d4489176c5"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '1322', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR 
OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + '02b55794-95a4-4d5a-b2dd-ee688d0d7700', + 'x-ms-ests-server', + '2.1.11562.10 - EUS ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWBQAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:20 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:19 GMT' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/dataflows/testdataflow') + .query(true) + .reply(200, {"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow","name":"testdataflow","type":"Microsoft.Synapse/workspaces/dataflows","properties":{"type":"MappingDataFlow"},"etag":"aa0690f6-0000-0800-0000-606b80b20000"}, [ 'Cache-Control', + 'no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '316', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Server', + 'Microsoft-IIS/10.0 Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=15724800; includeSubDomains', + 'x-ms-correlation-request-id', + '8d4177fb-751f-4194-9bd1-b06faaa7e0eb', + 'X-Content-Type-Options', + 'nosniff', + 'X-Powered-By', + 'ASP.NET', + 'x-ms-request-id', + '807f306a-e111-4817-9f03-787f0cde9c13', + 'Date', + 'Mon, 05 Apr 2021 21:27:20 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_list_dataflows.js b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_list_dataflows.js new file mode 100644 index 000000000000..84a6d8860981 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_list_dataflows.js @@ -0,0 +1,54 @@ +let nock = require('nock'); + +module.exports.hash = 
"df3f91c76ecc1e578504b234a98a3f8b"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '1322', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + 'd729a157-e25e-4b17-971b-e0830b006d00', + 'x-ms-ests-server', + '2.1.11562.10 - SCUS ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWBAAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:20 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:19 GMT' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/dataflows') + .query(true) + .reply(200, {"value":[{"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow","name":"testdataflow","type":"Microsoft.Synapse/workspaces/dataflows","etag":"aa0690f6-0000-0800-0000-606b80b20000","properties":{"type":"MappingDataFlow"}}]}, [ 'Content-Length', + '328', + 'Content-Type', + 'application/json; charset=utf-8', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; 
includeSubDomains', + 'x-ms-request-id', + '9ab5fe4e-c7f3-454e-96b7-cacc5082fdac', + 'Date', + 'Mon, 05 Apr 2021 21:27:19 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_rename_dataflow.js b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_rename_dataflow.js new file mode 100644 index 000000000000..f73edd9b6f11 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_rename_dataflow.js @@ -0,0 +1,232 @@ +let nock = require('nock'); + +module.exports.hash = "1f2f2307916638a93d675fb16195e53f"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + 'c336d7a6-677b-4fdf-af1c-a0b1044a6500', + 'x-ms-ests-server', + '2.1.11562.10 - SCUS ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWBQAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:20 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:19 GMT', + 'Content-Length', + '1322' ]); + +nock('https://testaccount.dev.azuresynapse.net', 
{"encodedQueryParams":true}) + .post('/dataflows/testdataflow/rename', {"newName":"testdataflow2"}) + .query(true) + .reply(202, {"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/dataflows/testdataflow","recordId":1439004,"state":"Renaming","created":"2021-04-05T21:27:09.59Z","changed":"2021-04-05T21:27:14.5966667Z","type":"DataFlow","name":"testdataflow","operationId":"0f09b77e-2184-43de-9f6a-84b3fea735f5","artifactId":"0525C732-319A-4843-8F0A-CD7F6892620B"}, [ 'Content-Length', + '420', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '0f5d4f7c-8f2d-4905-9f2a-52fc5874efd7', + 'Date', + 'Mon, 05 Apr 2021 21:27:20 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 
'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '218746f7-fdff-49f9-afed-3f094e6cd533', + 'Date', + 'Mon, 05 Apr 2021 21:27:20 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '3d748a73-6b20-40b0-bdc1-e5d6fcc9b727', + 'Date', + 'Mon, 05 Apr 2021 21:27:22 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '83b10ad1-b64b-4bc2-9142-85aaed812ccc', + 'Date', + 'Mon, 05 Apr 2021 21:27:24 GMT', + 'Connection', + 'close' ]); + 
+nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '1335cf98-f0ed-481a-b2ba-cef804871f75', + 'Date', + 'Mon, 05 Apr 2021 21:27:26 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '441f33ce-5f21-42e2-92fe-45dfd0c6bc3a', + 'Date', + 'Mon, 05 Apr 2021 21:27:28 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/operationResults/0f09b77e-2184-43de-9f6a-84b3fea735f5') + .query(true) + .reply(200, "", [ 
'Content-Length', + '0', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'x-ms-request-id', + '3e341d4d-86e2-4b8d-9f93-d41e64aa147a', + 'Date', + 'Mon, 05 Apr 2021 21:27:30 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/integrationruntimes/recording_should_get_integrationruntimes.js b/sdk/synapse/synapse-artifacts/recordings/node/integrationruntimes/recording_should_get_integrationruntimes.js new file mode 100644 index 000000000000..ff8ae6109880 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/integrationruntimes/recording_should_get_integrationruntimes.js @@ -0,0 +1,66 @@ +let nock = require('nock'); + +module.exports.hash = "a76192f8cc28a6cfa0b708eda3e6595d"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + '97fd9ab5-5452-404d-a5e4-e30b8bcc7100', + 'x-ms-ests-server', + '2.1.11562.10 - EUS ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWBwAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:43 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; 
secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:42 GMT', + 'Content-Length', + '1322' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/integrationRuntimes/AutoResolveIntegrationRuntime') + .query(true) + .reply(200, {"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/integrationruntimes/AutoResolveIntegrationRuntime","name":"AutoResolveIntegrationRuntime","type":"Microsoft.Synapse/workspaces/integrationruntimes","properties":{"type":"Managed","typeProperties":{"computeProperties":{"location":"AutoResolve"}},"provisioningState":null},"etag":"3801cded-0000-0800-0000-5fa34e1b0000"}, [ 'Cache-Control', + 'no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '453', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Server', + 'Microsoft-IIS/10.0 Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=15724800; includeSubDomains', + 'x-ms-correlation-request-id', + 'ae26350b-f89f-4269-80bc-f4b9b6ae3e0f', + 'X-Content-Type-Options', + 'nosniff', + 'X-Powered-By', + 'ASP.NET', + 'x-ms-request-id', + '10b6e6f4-8215-44f0-9845-a44077ebb55a', + 'Date', + 'Mon, 05 Apr 2021 21:27:43 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/integrationruntimes/recording_should_list_integrationruntimes.js b/sdk/synapse/synapse-artifacts/recordings/node/integrationruntimes/recording_should_list_integrationruntimes.js new file mode 100644 index 000000000000..c99cab33e733 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/integrationruntimes/recording_should_list_integrationruntimes.js @@ -0,0 +1,66 @@ +let nock = require('nock'); + +module.exports.hash = "9eafc336fd1af240271ad6f64ef0ef5d"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + 
.post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + 'c336d7a6-677b-4fdf-af1c-a0b1ea4c6500', + 'x-ms-ests-server', + '2.1.11562.10 - SCUS ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWBgAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:43 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:42 GMT', + 'Content-Length', + '1322' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/integrationRuntimes') + .query(true) + .reply(200, {"value":[{"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/integrationruntimes/AutoResolveIntegrationRuntime","name":"AutoResolveIntegrationRuntime","type":"Microsoft.Synapse/workspaces/integrationruntimes","properties":{"type":"Managed","typeProperties":{"computeProperties":{"location":"AutoResolve"}},"provisioningState":null},"etag":"3801cded-0000-0800-0000-5fa34e1b0000"}]}, [ 'Cache-Control', + 'no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '465', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Server', + 'Microsoft-IIS/10.0 
Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=15724800; includeSubDomains', + 'x-ms-correlation-request-id', + '7c5ae788-f842-4b67-aaa1-c6b45c5895e3', + 'X-Content-Type-Options', + 'nosniff', + 'X-Powered-By', + 'ASP.NET', + 'x-ms-request-id', + '996ff99f-49ff-4808-a12e-6feb5f381d42', + 'Date', + 'Mon, 05 Apr 2021 21:27:43 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_create_library.js b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_create_library.js new file mode 100644 index 000000000000..9d07b1c7b6cf --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_create_library.js @@ -0,0 +1,114 @@ +let nock = require('nock'); + +module.exports.hash = "0755bcc327b2bd84e4d7fe604e27e115"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '1322', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + '1e41d4e7-e4df-49b4-bf2d-31027fe15b00', + 'x-ms-ests-server', + '2.1.11562.10 - WUS2 ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWCAAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:43 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; 
samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:42 GMT' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .put('/libraries/testLibraryName.jar') + .query(true) + .reply(202, {"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/libraries/testLibraryName.jar","recordId":1439010,"state":"Creating","created":"2021-04-05T21:27:43.8633333Z","changed":"2021-04-05T21:27:43.8633333Z","type":"LibraryArtifact","name":"testLibraryName.jar","operationId":"8dd1d988-5503-484a-823c-d7b10cf04630","artifactId":"D369760A-876F-48AC-9F0D-8DA91909491A"}, [ 'Content-Length', + '446', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/8dd1d988-5503-484a-823c-d7b10cf04630?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + 'c5f73729-2d67-4495-ac08-8143e3585e23', + 'Date', + 'Mon, 05 Apr 2021 21:27:43 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/libraryOperationResults/8dd1d988-5503-484a-823c-d7b10cf04630') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/8dd1d988-5503-484a-823c-d7b10cf04630?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 
'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '456e7cc6-bba8-4aa0-b74a-9a5654b65fb6', + 'Date', + 'Mon, 05 Apr 2021 21:27:43 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/libraryOperationResults/8dd1d988-5503-484a-823c-d7b10cf04630') + .query(true) + .reply(200, {"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/libraries/testLibraryName.jar","name":"testLibraryName.jar","type":"Microsoft.Synapse/workspaces/libraries","properties":{"name":"testLibraryName.jar","path":"xysynapsetest/libraries/testLibraryName.jar","containerName":"prep","uploadedTimestamp":"2021-04-05T21:27:43.855845+00:00","type":"jar","provisioningStatus":"Incomplete","creatorId":"30511c9d-ba1a-4c7b-b422-5b543da11b3f"},"etag":"ab06012b-0000-0800-0000-606b80d10000"}, [ 'Content-Length', + '563', + 'Content-Type', + 'application/json; charset=utf-8', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'x-ms-request-id', + 'b4becad3-5690-4fce-aade-cece3b2ce633', + 'Date', + 'Mon, 05 Apr 2021 21:27:46 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_delete_library.js b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_delete_library.js new file mode 100644 index 000000000000..977b097052c5 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_delete_library.js @@ -0,0 +1,136 @@ +let nock = require('nock'); + +module.exports.hash = "92351ab8778cd53ffb22406c58a1c210"; + +module.exports.testInfo = 
{"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '1322', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + 'f4eccdd0-28c7-46ad-829f-33011dcd6500', + 'x-ms-ests-server', + '2.1.11562.10 - EUS ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWCgAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:50 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:49 GMT' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .delete('/libraries/testLibraryName.jar') + .query(true) + .reply(202, {"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/libraries/testLibraryName.jar","recordId":0,"state":"Deleting","created":"0001-01-01T00:00:00","changed":"0001-01-01T00:00:00","type":"LibraryArtifact","name":"testLibraryName.jar","operationId":"70976408-41ab-4baf-877c-435c95add06a"}, [ 'Content-Length', + '370', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 
'https://testaccount.dev.azuresynapse.net/libraryOperationResults/70976408-41ab-4baf-877c-435c95add06a?api-version=2019-06-01-preview', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Location', + 'x-ms-request-id', + 'b84bb98a-d66a-4ca6-a126-e13519fe0ca7', + 'Date', + 'Mon, 05 Apr 2021 21:27:49 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/libraryOperationResults/70976408-41ab-4baf-877c-435c95add06a') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/70976408-41ab-4baf-877c-435c95add06a?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '58661621-db56-4f68-bfdb-4fc0ada0c96d', + 'Date', + 'Mon, 05 Apr 2021 21:27:50 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/libraryOperationResults/70976408-41ab-4baf-877c-435c95add06a') + .query(true) + .reply(202, {"status":"InProgress"}, [ 'Content-Length', + '23', + 'Content-Type', + 'application/json; charset=utf-8', + 'Location', + 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/70976408-41ab-4baf-877c-435c95add06a?api-version=2019-06-01-preview', + 'Retry-After', + '10', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 
'Access-Control-Allow-Headers', + 'Location', + 'Access-Control-Allow-Headers', + 'Retry-After', + 'Access-Control-Expose-Headers', + 'Location', + 'Access-Control-Expose-Headers', + 'Retry-After', + 'x-ms-request-id', + '3c3335e1-f480-49cd-90e6-e4dcde936769', + 'Date', + 'Mon, 05 Apr 2021 21:27:52 GMT', + 'Connection', + 'close' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/libraryOperationResults/70976408-41ab-4baf-877c-435c95add06a') + .query(true) + .reply(200, "", [ 'Content-Length', + '0', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'x-ms-request-id', + '72a45ce5-f3ce-42ff-8769-112c348877fe', + 'Date', + 'Mon, 05 Apr 2021 21:27:54 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_get_library.js b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_get_library.js new file mode 100644 index 000000000000..4d0021e18d0d --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_get_library.js @@ -0,0 +1,54 @@ +let nock = require('nock'); + +module.exports.hash = "d3a9c1b2e09a20beb97164081ea683a9"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '1322', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 
'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + 'a4433f94-db9a-4a15-9895-e4715e946700', + 'x-ms-ests-server', + '2.1.11562.10 - NCUS ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWCQAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:50 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:49 GMT' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/libraries/testLibraryName.jar') + .query(true) + .reply(200, {"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/libraries/testLibraryName.jar","name":"testLibraryName.jar","type":"Microsoft.Synapse/workspaces/libraries","properties":{"name":"testLibraryName.jar","path":"xysynapsetest/libraries/testLibraryName.jar","containerName":"prep","uploadedTimestamp":"2021-04-05T21:27:43.855845+00:00","type":"jar","provisioningStatus":"Incomplete","creatorId":"30511c9d-ba1a-4c7b-b422-5b543da11b3f"},"etag":"ab06012b-0000-0800-0000-606b80d10000"}, [ 'Content-Length', + '563', + 'Content-Type', + 'application/json; charset=utf-8', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'x-ms-request-id', + '3bd4139f-cf23-46f1-aa61-7efcbc539370', + 'Date', + 'Mon, 05 Apr 2021 21:27:49 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_list_library.js b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_list_library.js new file mode 100644 index 000000000000..1b1b50725d4c --- /dev/null +++ b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_list_library.js @@ -0,0 +1,54 @@ 
+let nock = require('nock'); + +module.exports.hash = "9eb5ae69a3017b1057ab7a7ab3cb4404"; + +module.exports.testInfo = {"uniqueName":{},"newDate":{}} + +nock('https://login.microsoftonline.com:443', {"encodedQueryParams":true}) + .post('/88888888-8888-8888-8888-888888888888/oauth2/v2.0/token', "response_type=token&grant_type=client_credentials&client_id=azure_client_id&client_secret=azure_client_secret&scope=https%3A%2F%2Fdev.azuresynapse.net%2F.default") + .reply(200, {"token_type":"Bearer","expires_in":86399,"ext_expires_in":86399,"access_token":"access_token"}, [ 'Cache-Control', + 'no-store, no-cache', + 'Pragma', + 'no-cache', + 'Content-Length', + '1322', + 'Content-Type', + 'application/json; charset=utf-8', + 'Expires', + '-1', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'X-Content-Type-Options', + 'nosniff', + 'P3P', + 'CP="DSP CUR OTPi IND OTRi ONL FIN"', + 'x-ms-request-id', + '54777b7c-38ce-4e91-bef8-d402d77f5700', + 'x-ms-ests-server', + '2.1.11562.10 - WUS2 ProdSlices', + 'Set-Cookie', + 'fpc=AvzULwO9SbVOqJjwdF4-3lHKOuyWCQAAAKt3_dcOAAAA; expires=Wed, 05-May-2021 21:27:50 GMT; path=/; secure; HttpOnly; SameSite=None', + 'Set-Cookie', + 'x-ms-gateway-slice=estsfd; path=/; secure; samesite=none; httponly', + 'Set-Cookie', + 'stsservicecookie=estsfd; path=/; secure; samesite=none; httponly', + 'Date', + 'Mon, 05 Apr 2021 21:27:49 GMT' ]); + +nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) + .get('/libraries') + .query(true) + .reply(200, 
{"value":[{"id":"/subscriptions/faa080af-c1d8-40ad-9cce-e1a450ca5b57/resourceGroups/xiangyan/providers/Microsoft.Synapse/workspaces/xysynapsetest/libraries/testLibraryName.jar","name":"testLibraryName.jar","type":"Microsoft.Synapse/workspaces/libraries","etag":"ab06012b-0000-0800-0000-606b80d10000","properties":{"name":"testLibraryName.jar","path":"xysynapsetest/libraries/testLibraryName.jar","containerName":"prep","uploadedTimestamp":"2021-04-05T21:27:43.855845+00:00","type":"jar","provisioningStatus":"Incomplete","creatorId":"30511c9d-ba1a-4c7b-b422-5b543da11b3f"}}]}, [ 'Content-Length', + '575', + 'Content-Type', + 'application/json; charset=utf-8', + 'Server', + 'Microsoft-HTTPAPI/2.0', + 'Strict-Transport-Security', + 'max-age=31536000; includeSubDomains', + 'x-ms-request-id', + 'ae02e55d-d9d6-4173-9691-810ed7d8a7bd', + 'Date', + 'Mon, 05 Apr 2021 21:27:50 GMT', + 'Connection', + 'close' ]); diff --git a/sdk/synapse/synapse-artifacts/rollup.config.js b/sdk/synapse/synapse-artifacts/rollup.config.js index 0d1ffc7a6542..5d7deee44c14 100644 --- a/sdk/synapse/synapse-artifacts/rollup.config.js +++ b/sdk/synapse/synapse-artifacts/rollup.config.js @@ -1,42 +1,3 @@ -import rollup from "rollup"; -import nodeResolve from "rollup-plugin-node-resolve"; -import sourcemaps from "rollup-plugin-sourcemaps"; -import cjs from "@rollup/plugin-commonjs"; -import { openTelemetryCommonJs } from "@azure/dev-tool/shared-config/rollup"; +import { makeConfig } from "@azure/dev-tool/shared-config/rollup"; -/** - * @type {rollup.RollupFileOptions} - */ -const config = { - input: "./dist-esm/artifactsClient.js", - external: ["@azure/core-http", "@azure/core-arm"], - output: { - file: "./dist/index.js", - format: "cjs", - name: "Azure.SynapseArtifacts", - sourcemap: true, - globals: { - "@azure/core-http": "coreHttp", - "@azure/core-arm": "coreArm" - }, - banner: `/* - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. 
See License.txt in the project root for license information. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ ` - }, - plugins: [ - nodeResolve({ module: true }), - sourcemaps(), - cjs({ - namedExports: { - assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], - ...openTelemetryCommonJs() - } - }) - ] -}; - -export default config; +export default makeConfig(require("./package.json")); diff --git a/sdk/synapse/synapse-artifacts/sample.env b/sdk/synapse/synapse-artifacts/sample.env new file mode 100644 index 000000000000..715dfd4edde2 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/sample.env @@ -0,0 +1,11 @@ +# Used to authenticate with the Azure Synapse Artifacts service +ENDPOINT="https://.dev.azuresynapse.net/" + +# Used to authenticate using Azure AD as a service principal for role-based +# authentication. +# +# See the documentation for `EnvironmentCredential` at the following link: +# https://docs.microsoft.com/javascript/api/@azure/identity/environmentcredential +AZURE_TENANT_ID= +AZURE_CLIENT_ID= +AZURE_CLIENT_SECRET= diff --git a/sdk/synapse/synapse-artifacts/src/index.ts b/sdk/synapse/synapse-artifacts/src/index.ts index 350ba0271609..ab0616ec8ef2 100644 --- a/sdk/synapse/synapse-artifacts/src/index.ts +++ b/sdk/synapse/synapse-artifacts/src/index.ts @@ -6,6 +6,8 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /// + +import "@azure/core-paging"; export * from "./models"; export { LROPoller, diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts index 94e74722c0cf..0d1228ff5def 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -5,7 +5,7 @@ * Code generated by Microsoft (R) AutoRest Code Generator. 
* Changes may cause incorrect behavior and will be lost if the code is regenerated. */ - +/// import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; @@ -27,7 +27,6 @@ import { DataFlowGetDataFlowsByWorkspaceNextResponse } from "../models"; -/// /** Class representing a DataFlow. */ export class DataFlowImpl implements DataFlow { private readonly client: ArtifactsClientContext; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index d199d6093fb7..05071948dcf1 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -6,6 +6,7 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ +/// import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; @@ -29,7 +30,6 @@ import { DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse } from "../models"; -/// /** Class representing a DataFlowDebugSession. */ export class DataFlowDebugSessionImpl implements DataFlowDebugSession { private readonly client: ArtifactsClientContext; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts index 1efd41ff66fa..9229ba368e23 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -5,7 +5,7 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ - +/// import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; @@ -27,7 +27,6 @@ import { DatasetGetDatasetsByWorkspaceNextResponse } from "../models"; -/// /** Class representing a Dataset. */ export class DatasetImpl implements Dataset { private readonly client: ArtifactsClientContext; diff --git a/sdk/synapse/synapse-artifacts/src/operations/library.ts b/sdk/synapse/synapse-artifacts/src/operations/library.ts index 91f61baa05b2..740d2049db24 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/library.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/library.ts @@ -5,7 +5,7 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ - +/// import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; @@ -25,7 +25,6 @@ import { LibraryListNextResponse } from "../models"; -/// /** Class representing a Library. */ export class LibraryImpl implements Library { private readonly client: ArtifactsClientContext; diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts index 07804ed8c7bf..5b936f4dcaf8 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -6,6 +6,7 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ +/// import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; @@ -27,7 +28,6 @@ import { LinkedServiceGetLinkedServicesByWorkspaceNextResponse } from "../models"; -/// /** Class representing a LinkedService. 
*/ export class LinkedServiceImpl implements LinkedService { private readonly client: ArtifactsClientContext; diff --git a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts index d39519bc9d03..f5d3fcfa794e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -6,6 +6,7 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ +/// import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; @@ -29,7 +30,6 @@ import { NotebookGetNotebookSummaryByWorkSpaceNextResponse } from "../models"; -/// /** Class representing a Notebook. */ export class NotebookImpl implements Notebook { private readonly client: ArtifactsClientContext; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts index 19b90d10778f..077cd32e3577 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -6,6 +6,7 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ +/// import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; @@ -29,7 +30,6 @@ import { PipelineGetPipelinesByWorkspaceNextResponse } from "../models"; -/// /** Class representing a Pipeline. 
*/ export class PipelineImpl implements Pipeline { private readonly client: ArtifactsClientContext; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts index a047f7dd68ea..64a225b50c5c 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -6,6 +6,7 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ +/// import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; @@ -29,7 +30,6 @@ import { SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse } from "../models"; -/// /** Class representing a SparkJobDefinition. */ export class SparkJobDefinitionImpl implements SparkJobDefinition { private readonly client: ArtifactsClientContext; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts index a02ffb8e8dc2..f7bd0d898e1f 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -6,6 +6,7 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ +/// import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; @@ -27,7 +28,6 @@ import { SqlScriptGetSqlScriptsByWorkspaceNextResponse } from "../models"; -/// /** Class representing a SqlScript. 
*/ export class SqlScriptImpl implements SqlScript { private readonly client: ArtifactsClientContext; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts index 144b7f1d9385..f34bc602874a 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -6,6 +6,7 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ +/// import { SpanStatusCode } from "@azure/core-tracing"; import { createSpan } from "../tracing"; import "@azure/core-paging"; @@ -29,7 +30,6 @@ import { TriggerGetTriggersByWorkspaceNextResponse } from "../models"; -/// /** Class representing a Trigger. */ export class TriggerImpl implements Trigger { private readonly client: ArtifactsClientContext; diff --git a/sdk/synapse/synapse-artifacts/test/public/bigDataPools.spec.ts b/sdk/synapse/synapse-artifacts/test/public/bigDataPools.spec.ts new file mode 100644 index 000000000000..be9eac73b004 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/test/public/bigDataPools.spec.ts @@ -0,0 +1,29 @@ +import { ArtifactsClient } from "../../src/artifactsClient"; +import { Recorder } from "@azure/test-utils-recorder"; +import { assert } from "chai"; +import { createClient, createRecorder } from "./utils/recordedClient"; + +describe("BigDataPools", () => { + let recorder: Recorder; + let client: ArtifactsClient; + + beforeEach(function() { + recorder = createRecorder(this); + client = createClient(); + }); + + afterEach(async () => { + await recorder.stop(); + }); + + it("should list bigDataPools", async () => { + const result = await client.bigDataPools.list(); + assert.ok(result.value && result.value.length >= 1, "Result doesn't contain any values"); + }); + + it("should get a bigDataPool by name", async () => { + const expectedPoolName = "testsparkpool"; + const result = await client.bigDataPools.get(expectedPoolName); + 
assert.equal(result.name, expectedPoolName); + }); +}); diff --git a/sdk/synapse/synapse-artifacts/test/public/dataFlows.spec.ts b/sdk/synapse/synapse-artifacts/test/public/dataFlows.spec.ts new file mode 100644 index 000000000000..84a69beb728a --- /dev/null +++ b/sdk/synapse/synapse-artifacts/test/public/dataFlows.spec.ts @@ -0,0 +1,63 @@ +import { ArtifactsClient } from "../../src/artifactsClient"; +import { Recorder } from "@azure/test-utils-recorder"; +import { assert } from "chai"; +import { createClient, createRecorder } from "./utils/recordedClient"; + +describe("DataFlow", () => { + let recorder: Recorder; + let client: ArtifactsClient; + const dataFlowName = "testdataflow"; + const renamedDataflow = "testdataflow2"; + + beforeEach(function() { + recorder = createRecorder(this); + client = createClient(); + }); + + afterEach(async () => { + await recorder.stop(); + }); + + it("should create dataFlow", async () => { + const poller = await client.dataFlow.createOrUpdateDataFlow(dataFlowName, { + properties: { type: "MappingDataFlow" } + }); + + const result = await poller.pollUntilDone(); + + assert.equal(result.name, dataFlowName); + }).timeout(30000); + + it("should list dataFlows", async () => { + const dataflows = client.dataFlow.listDataFlowsByWorkspace(); + let count = 0; + for await (const item of dataflows) { + if (item) { + count++; + } + } + + assert.ok(count > 0, "No data flows found"); + }).timeout(30000); + + it("should get dataFlow", async () => { + const dataFlow = await client.dataFlow.getDataFlow(dataFlowName); + assert.equal(dataFlow.name, dataFlowName); + }); + + it("should rename dataFlow", async () => { + const poller = await client.dataFlow.renameDataFlow(dataFlowName, { + newName: renamedDataflow + }); + const result = await poller.pollUntilDone(); + + assert.equal(result._response.status, 200); + }).timeout(30000); + + it("should delete dataFlow", async () => { + const poller = await client.dataFlow.deleteDataFlow(renamedDataflow); + 
const result = await poller.pollUntilDone(); + + assert.equal(result._response.status, 200); + }).timeout(30000); +}); diff --git a/sdk/synapse/synapse-artifacts/test/public/integrationRuntimes.spec.ts b/sdk/synapse/synapse-artifacts/test/public/integrationRuntimes.spec.ts new file mode 100644 index 000000000000..02919abf1425 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/test/public/integrationRuntimes.spec.ts @@ -0,0 +1,32 @@ +import { ArtifactsClient } from "../../src/artifactsClient"; +import { Recorder } from "@azure/test-utils-recorder"; +import { assert } from "chai"; +import { createClient, createRecorder } from "./utils/recordedClient"; + +describe("IntegrationRuntimes", () => { + let recorder: Recorder; + let client: ArtifactsClient; + + beforeEach(function() { + recorder = createRecorder(this); + client = createClient(); + }); + + afterEach(async () => { + await recorder.stop(); + }); + + it("should list integrationRuntimes", async () => { + const result = await client.integrationRuntimes.list(); + if (result.value.length) { + assert.equal(result.value[0].name, "AutoResolveIntegrationRuntime"); + } else { + assert.fail("No integrationRuntimes found"); + } + }); + + it("should get integrationRuntimes", async () => { + const result = await client.integrationRuntimes.get("AutoResolveIntegrationRuntime"); + assert.equal(result.name, "AutoResolveIntegrationRuntime"); + }); +}); diff --git a/sdk/synapse/synapse-artifacts/test/public/library.spec.ts b/sdk/synapse/synapse-artifacts/test/public/library.spec.ts new file mode 100644 index 000000000000..6822b6f9e4fd --- /dev/null +++ b/sdk/synapse/synapse-artifacts/test/public/library.spec.ts @@ -0,0 +1,47 @@ +import { ArtifactsClient } from "../../src/artifactsClient"; +import { Recorder } from "@azure/test-utils-recorder"; +import { assert } from "chai"; +import { createClient, createRecorder } from "./utils/recordedClient"; + +describe("Library", () => { + let recorder: Recorder; + let client: ArtifactsClient; 
+ + beforeEach(function() { + recorder = createRecorder(this); + client = createClient(); + }); + + afterEach(async () => { + await recorder.stop(); + }); + + const testLibraryName = "testLibraryName.jar"; + it("should create library", async () => { + const poller = await client.library.create(testLibraryName); + const result = await poller.pollUntilDone(); + assert.equal(result._response.status, 200); + }).timeout(30000); + + it("should list library", async () => { + const libraries = client.library.list(); + + let count = 0; + for await (const _library of libraries) { + count++; + } + + assert.ok(count > 0); + }); + + it("should get library", async () => { + const result = await client.library.get(testLibraryName); + assert.equal(result.name, testLibraryName); + }); + + it("should delete library", async () => { + const poller = await client.library.delete(testLibraryName); + const result = await poller.pollUntilDone(); + assert.equal(result._response.status, 200); + }).timeout(30000); +}); diff --git a/sdk/synapse/synapse-artifacts/test/public/utils/env.browser.ts b/sdk/synapse/synapse-artifacts/test/public/utils/env.browser.ts new file mode 100644 index 000000000000..fd2aca680c7b --- /dev/null +++ b/sdk/synapse/synapse-artifacts/test/public/utils/env.browser.ts @@ -0,0 +1,2 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. diff --git a/sdk/synapse/synapse-artifacts/test/public/utils/env.ts b/sdk/synapse/synapse-artifacts/test/public/utils/env.ts new file mode 100644 index 000000000000..0e06855b73ae --- /dev/null +++ b/sdk/synapse/synapse-artifacts/test/public/utils/env.ts @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+ +import * as dotenv from "dotenv"; + +dotenv.config(); diff --git a/sdk/synapse/synapse-artifacts/test/public/utils/recordedClient.ts b/sdk/synapse/synapse-artifacts/test/public/utils/recordedClient.ts new file mode 100644 index 000000000000..9d22a672e198 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/test/public/utils/recordedClient.ts @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +/// + +import { Context } from "mocha"; + +import { env, Recorder, record, RecorderEnvironmentSetup } from "@azure/test-utils-recorder"; +import { TokenCredential, ClientSecretCredential } from "@azure/identity"; + +import { ArtifactsClient, ArtifactsClientOptionalParams } from "../../../src"; +import "./env"; + +const replaceableVariables: { [k: string]: string } = { + AZURE_CLIENT_ID: "azure_client_id", + AZURE_CLIENT_SECRET: "azure_client_secret", + AZURE_TENANT_ID: "88888888-8888-8888-8888-888888888888", + TEXT_ANALYTICS_API_KEY: "api_key", + // Second API key + TEXT_ANALYTICS_API_KEY_ALT: "api_key_alt", + ENDPOINT: "https://testaccount.dev.azuresynapse.net" +}; + +export const environmentSetup: RecorderEnvironmentSetup = { + replaceableVariables, + customizationsOnRecordings: [ + (recording: string): string => + recording.replace(/"access_token"\s?:\s?"[^"]*"/g, `"access_token":"access_token"`), + // If we put ENDPOINT in replaceableVariables above, it will not capture + // the endpoint string used with nock, which will be expanded to + // https://:443/ and therefore will not match, so we have to do + // this instead. 
+ (recording: string): string => { + const replaced = recording.replace( + "testaccount.dev.azuresynapse.net:443", + "testaccount.dev.azuresynapse.net" + ); + return replaced; + } + ], + queryParametersToSkip: [] +}; + +export function createClient(options?: ArtifactsClientOptionalParams): ArtifactsClient { + let credential: TokenCredential; + + credential = new ClientSecretCredential( + env.AZURE_TENANT_ID, + env.AZURE_CLIENT_ID, + env.AZURE_CLIENT_SECRET + ); + + return new ArtifactsClient(credential, env.ENDPOINT, options); +} + +/** + * Creates the recorder and reads the environment variables from the `.env` file. + * Should be called first in the test suite to make sure environment variables are + * read before they are being used. + */ +export function createRecorder(context: Context): Recorder { + return record(context, environmentSetup); +} diff --git a/sdk/synapse/synapse-artifacts/tsconfig.json b/sdk/synapse/synapse-artifacts/tsconfig.json index d43efedfc9bc..661ccc02eae0 100644 --- a/sdk/synapse/synapse-artifacts/tsconfig.json +++ b/sdk/synapse/synapse-artifacts/tsconfig.json @@ -1,20 +1,9 @@ { + "extends": "../../../tsconfig.package", "compilerOptions": { - "module": "es6", - "moduleResolution": "node", - "strict": true, - "target": "es5", - "sourceMap": true, - "declarationMap": true, - "esModuleInterop": true, - "allowSyntheticDefaultImports": true, - "forceConsistentCasingInFileNames": true, - "preserveConstEnums": true, - "lib": ["es6", "dom"], - "declaration": true, "outDir": "./dist-esm", - "importHelpers": true + "declarationDir": "./types" }, - "include": ["./src/**/*.ts"], - "exclude": ["node_modules"] + "include": ["src/**/*.ts", "test/**/*.ts"], + "exclude": ["node_modules", "types", "temp", "browser", "dist*", "./dom-shims.d.ts"] } From 1243e958a71f71342f09811c1bd35e7c5a957132 Mon Sep 17 00:00:00 2001 From: Jose Manuel Heredia Hidalgo Date: Tue, 6 Apr 2021 13:43:36 -0700 Subject: [PATCH 6/6] Update sdk/synapse/synapse-artifacts/package.json 
Co-authored-by: chradek <51000525+chradek@users.noreply.github.com> --- sdk/synapse/synapse-artifacts/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index 11a02a72ea81..63f1352002b5 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -126,7 +126,7 @@ }, { "path": "swagger/README.md", - "prefix": "pacage-version" + "prefix": "package-version" } ] }