diff --git a/src/datashare/HISTORY.rst b/src/datashare/HISTORY.rst
index 2f7a7a548bf..1c139576ba0 100644
--- a/src/datashare/HISTORY.rst
+++ b/src/datashare/HISTORY.rst
@@ -1,13 +1,8 @@
-.. :changelog:
-
-Release History
-===============
-
-0.1.1
-+++++
-* Add examples for creating a BlobFolder dataset
-
-
-0.1.0
-++++++
-* Initial release.
+.. :changelog:
+
+Release History
+===============
+
+0.1.0
+++++++
+* Initial release.
diff --git a/src/datashare/azext_datashare/__init__.py b/src/datashare/azext_datashare/__init__.py
index 505737b668a..921d22c1a54 100644
--- a/src/datashare/azext_datashare/__init__.py
+++ b/src/datashare/azext_datashare/__init__.py
@@ -1,49 +1,50 @@
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-# pylint: disable=unused-argument
-# pylint: disable=unused-import
-
-from azure.cli.core import AzCommandsLoader
-from azext_datashare._help import helps
-
-
-class DataShareManagementClientCommandsLoader(AzCommandsLoader):
-
- def __init__(self, cli_ctx=None):
- from azure.cli.core.commands import CliCommandType
- from azext_datashare.generated._client_factory import cf_datashare
- datashare_custom = CliCommandType(
- operations_tmpl='azext_datashare.manual.custom#{}', # modified
- client_factory=cf_datashare)
- super(DataShareManagementClientCommandsLoader, self).__init__(cli_ctx=cli_ctx,
- custom_command_type=datashare_custom)
-
- def load_command_table(self, args):
- try:
- from azext_datashare.generated.commands import load_command_table
- load_command_table(self, args)
- except ImportError:
- pass
- try:
- from azext_datashare.manual.commands import load_command_table as load_command_table_manual
- load_command_table_manual(self, args)
- except ImportError:
- pass
- return self.command_table
-
- def load_arguments(self, command):
- try:
- from azext_datashare.generated._params import load_arguments
- load_arguments(self, command)
- except ImportError:
- pass
- try:
- from azext_datashare.manual._params import load_arguments as load_arguments_manual
- load_arguments_manual(self, command)
- except ImportError:
- pass
-
-
-COMMAND_LOADER_CLS = DataShareManagementClientCommandsLoader
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from azure.cli.core import AzCommandsLoader
+from azext_datashare.generated._help import helps # pylint: disable=unused-import
+try:
+ from azext_datashare.manual._help import helps # pylint: disable=reimported
+except ImportError:
+ pass
+
+
+class DataShareManagementClientCommandsLoader(AzCommandsLoader):
+
+ def __init__(self, cli_ctx=None):
+ from azure.cli.core.commands import CliCommandType
+ from azext_datashare.generated._client_factory import cf_datashare_cl
+ datashare_custom = CliCommandType(
+ operations_tmpl='azext_datashare.custom#{}',
+ client_factory=cf_datashare_cl)
+ parent = super(DataShareManagementClientCommandsLoader, self)
+ parent.__init__(cli_ctx=cli_ctx, custom_command_type=datashare_custom)
+
+ def load_command_table(self, args):
+ from azext_datashare.generated.commands import load_command_table
+ load_command_table(self, args)
+ try:
+ from azext_datashare.manual.commands import load_command_table as load_command_table_manual
+ load_command_table_manual(self, args)
+ except ImportError:
+ pass
+ return self.command_table
+
+ def load_arguments(self, command):
+ from azext_datashare.generated._params import load_arguments
+ load_arguments(self, command)
+ try:
+ from azext_datashare.manual._params import load_arguments as load_arguments_manual
+ load_arguments_manual(self, command)
+ except ImportError:
+ pass
+
+
+COMMAND_LOADER_CLS = DataShareManagementClientCommandsLoader
diff --git a/src/datashare/azext_datashare/_params.py b/src/datashare/azext_datashare/_params.py
deleted file mode 100644
index d1283049baf..00000000000
--- a/src/datashare/azext_datashare/_params.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-# pylint: disable=wildcard-import
-# pylint: disable=unused-wildcard-import
-
-try:
- from .generated._params import * # noqa: F403
-except ImportError:
- pass
-
-try:
- from .manual._params import * # noqa: F403
-except ImportError:
- pass
diff --git a/src/datashare/azext_datashare/action.py b/src/datashare/azext_datashare/action.py
index 4ad472a8c52..d95d53bf711 100644
--- a/src/datashare/azext_datashare/action.py
+++ b/src/datashare/azext_datashare/action.py
@@ -1,16 +1,17 @@
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-# pylint: disable=wildcard-import
-# pylint: disable=unused-wildcard-import
-
-try:
- from .generated.action import * # noqa: F403
-except ImportError:
- pass
-
-try:
- from .manual.action import * # noqa: F403
-except ImportError:
- pass
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wildcard-import
+# pylint: disable=unused-wildcard-import
+
+from .generated.action import * # noqa: F403
+try:
+ from .manual.action import * # noqa: F403
+except ImportError:
+ pass
diff --git a/src/datashare/azext_datashare/azext_metadata.json b/src/datashare/azext_datashare/azext_metadata.json
index 7b56fb1e11a..cfc30c747c7 100644
--- a/src/datashare/azext_datashare/azext_metadata.json
+++ b/src/datashare/azext_datashare/azext_metadata.json
@@ -1,4 +1,4 @@
-{
- "azext.isExperimental": true,
- "azext.minCliCoreVersion": "2.3.1"
+{
+ "azext.isExperimental": true,
+ "azext.minCliCoreVersion": "2.15.0"
}
\ No newline at end of file
diff --git a/src/datashare/azext_datashare/_help.py b/src/datashare/azext_datashare/custom.py
similarity index 54%
rename from src/datashare/azext_datashare/_help.py
rename to src/datashare/azext_datashare/custom.py
index c0b36e140d8..dbe9d5f9742 100644
--- a/src/datashare/azext_datashare/_help.py
+++ b/src/datashare/azext_datashare/custom.py
@@ -1,16 +1,17 @@
-# --------------------------------------------------------------------------------------------
+# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
# pylint: disable=wildcard-import
# pylint: disable=unused-wildcard-import
-# try:
-# from .generated._help import * # noqa: F403
-# except ImportError:
-# pass
-
+from .generated.custom import * # noqa: F403
try:
- from .manual._help import * # noqa: F403
+ from .manual.custom import * # noqa: F403
except ImportError:
pass
diff --git a/src/datashare/azext_datashare/generated/_client_factory.py b/src/datashare/azext_datashare/generated/_client_factory.py
index bba6d6bcfb5..5378b2b7349 100644
--- a/src/datashare/azext_datashare/generated/_client_factory.py
+++ b/src/datashare/azext_datashare/generated/_client_factory.py
@@ -1,54 +1,60 @@
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-
-
-def cf_datashare(cli_ctx, *_):
- from azure.cli.core.commands.client_factory import get_mgmt_service_client
- from ..vendored_sdks.datashare import DataShareManagementClient
- return get_mgmt_service_client(cli_ctx, DataShareManagementClient)
-
-
-def cf_account(cli_ctx, *_):
- return cf_datashare(cli_ctx).account
-
-
-def cf_consumer_invitation(cli_ctx, *_):
- return cf_datashare(cli_ctx).consumer_invitation
-
-
-def cf_data_set(cli_ctx, *_):
- return cf_datashare(cli_ctx).data_set
-
-
-def cf_data_set_mapping(cli_ctx, *_):
- return cf_datashare(cli_ctx).data_set_mapping
-
-
-def cf_invitation(cli_ctx, *_):
- return cf_datashare(cli_ctx).invitation
-
-
-def cf_share(cli_ctx, *_):
- return cf_datashare(cli_ctx).share
-
-
-def cf_provider_share_subscription(cli_ctx, *_):
- return cf_datashare(cli_ctx).provider_share_subscription
-
-
-def cf_share_subscription(cli_ctx, *_):
- return cf_datashare(cli_ctx).share_subscription
-
-
-def cf_consumer_source_data_set(cli_ctx, *_):
- return cf_datashare(cli_ctx).consumer_source_data_set
-
-
-def cf_synchronization_setting(cli_ctx, *_):
- return cf_datashare(cli_ctx).synchronization_setting
-
-
-def cf_trigger(cli_ctx, *_):
- return cf_datashare(cli_ctx).trigger
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+def cf_datashare_cl(cli_ctx, *_):
+ from azure.cli.core.commands.client_factory import get_mgmt_service_client
+ from azext_datashare.vendored_sdks.datashare import DataShareManagementClient
+ return get_mgmt_service_client(cli_ctx,
+ DataShareManagementClient)
+
+
+def cf_account(cli_ctx, *_):
+ return cf_datashare_cl(cli_ctx).accounts
+
+
+def cf_consumer_invitation(cli_ctx, *_):
+ return cf_datashare_cl(cli_ctx).consumer_invitations
+
+
+def cf_data_set(cli_ctx, *_):
+ return cf_datashare_cl(cli_ctx).data_sets
+
+
+def cf_data_set_mapping(cli_ctx, *_):
+ return cf_datashare_cl(cli_ctx).data_set_mappings
+
+
+def cf_invitation(cli_ctx, *_):
+ return cf_datashare_cl(cli_ctx).invitations
+
+
+def cf_share(cli_ctx, *_):
+ return cf_datashare_cl(cli_ctx).shares
+
+
+def cf_provider_share_subscription(cli_ctx, *_):
+ return cf_datashare_cl(cli_ctx).provider_share_subscriptions
+
+
+def cf_share_subscription(cli_ctx, *_):
+ return cf_datashare_cl(cli_ctx).share_subscriptions
+
+
+def cf_consumer_source_data_set(cli_ctx, *_):
+ return cf_datashare_cl(cli_ctx).consumer_source_data_sets
+
+
+def cf_synchronization_setting(cli_ctx, *_):
+ return cf_datashare_cl(cli_ctx).synchronization_settings
+
+
+def cf_trigger(cli_ctx, *_):
+ return cf_datashare_cl(cli_ctx).triggers
diff --git a/src/datashare/azext_datashare/generated/_help.py b/src/datashare/azext_datashare/generated/_help.py
new file mode 100644
index 00000000000..879efbb7e47
--- /dev/null
+++ b/src/datashare/azext_datashare/generated/_help.py
@@ -0,0 +1,988 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+
+from knack.help_files import helps
+
+
+helps['datashare account'] = """
+ type: group
+ short-summary: Manage account with datashare
+"""
+
+helps['datashare account list'] = """
+ type: command
+ short-summary: "List Accounts in ResourceGroup And List Accounts in Subscription."
+ examples:
+ - name: Accounts_ListByResourceGroup
+ text: |-
+ az datashare account list --resource-group "SampleResourceGroup"
+ - name: Accounts_ListBySubscription
+ text: |-
+ az datashare account list
+"""
+
+helps['datashare account show'] = """
+ type: command
+ short-summary: "Get an account."
+ examples:
+ - name: Accounts_Get
+ text: |-
+ az datashare account show --name "Account1" --resource-group "SampleResourceGroup"
+"""
+
+helps['datashare account create'] = """
+ type: command
+ short-summary: "Create an account."
+ examples:
+ - name: Accounts_Create
+ text: |-
+ az datashare account create --location "West US 2" --tags tag1="Red" tag2="White" --name "Account1" \
+--resource-group "SampleResourceGroup"
+"""
+
+helps['datashare account update'] = """
+ type: command
+ short-summary: "Patch an account."
+ examples:
+ - name: Accounts_Update
+ text: |-
+ az datashare account update --name "Account1" --tags tag1="Red" tag2="White" --resource-group \
+"SampleResourceGroup"
+"""
+
+helps['datashare account delete'] = """
+ type: command
+ short-summary: "DeleteAccount."
+ examples:
+ - name: Accounts_Delete
+ text: |-
+ az datashare account delete --name "Account1" --resource-group "SampleResourceGroup"
+"""
+
+helps['datashare account wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the datashare account is met.
+ examples:
+ - name: Pause executing next line of CLI script until the datashare account is successfully created.
+ text: |-
+ az datashare account wait --name "Account1" --resource-group "SampleResourceGroup" --created
+ - name: Pause executing next line of CLI script until the datashare account is successfully deleted.
+ text: |-
+ az datashare account wait --name "Account1" --resource-group "SampleResourceGroup" --deleted
+"""
+
+helps['datashare consumer-invitation'] = """
+ type: group
+ short-summary: Manage consumer invitation with datashare
+"""
+
+helps['datashare consumer-invitation show'] = """
+ type: command
+ short-summary: "Get an invitation."
+ examples:
+ - name: ConsumerInvitations_Get
+ text: |-
+ az datashare consumer-invitation show --invitation-id "dfbbc788-19eb-4607-a5a1-c74181bfff03" --location \
+"East US 2"
+"""
+
+helps['datashare consumer-invitation list-invitation'] = """
+ type: command
+ short-summary: "Lists invitations."
+ examples:
+ - name: ConsumerInvitations_ListInvitations
+ text: |-
+ az datashare consumer-invitation list-invitation
+"""
+
+helps['datashare consumer-invitation reject-invitation'] = """
+ type: command
+ short-summary: "Reject an invitation."
+ examples:
+ - name: ConsumerInvitations_RejectInvitation
+ text: |-
+ az datashare consumer-invitation reject-invitation --invitation-id "dfbbc788-19eb-4607-a5a1-c74181bfff03\
+" --location "East US 2"
+"""
+
+helps['datashare data-set'] = """
+ type: group
+ short-summary: Manage data set with datashare
+"""
+
+helps['datashare data-set list'] = """
+ type: command
+ short-summary: "List DataSets in a share."
+ examples:
+ - name: DataSets_ListByShare
+ text: |-
+ az datashare data-set list --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-name "Share1"
+"""
+
+helps['datashare data-set show'] = """
+ type: command
+ short-summary: "Get a DataSet in a share."
+ examples:
+ - name: DataSets_Get
+ text: |-
+ az datashare data-set show --account-name "Account1" --name "Dataset1" --resource-group \
+"SampleResourceGroup" --share-name "Share1"
+"""
+
+helps['datashare data-set create'] = """
+ type: command
+ short-summary: "Create a DataSet."
+ parameters:
+ - name: --adls-gen1-file-data-set
+ short-summary: "An ADLS Gen 1 file data set."
+ long-summary: |
+ Usage: --adls-gen1-file-data-set account-name=XX file-name=XX folder-path=XX resource-group=XX \
+subscription-id=XX kind=XX
+
+ account-name: Required. The ADLS account name.
+ file-name: Required. The file name in the ADLS account.
+ folder-path: Required. The folder path within the ADLS account.
+ resource-group: Required. Resource group of ADLS account.
+ subscription-id: Required. Subscription id of ADLS account.
+ kind: Required. Kind of data set.
+ - name: --adls-gen1-folder-data-set
+ short-summary: "An ADLS Gen 1 folder data set."
+ long-summary: |
+ Usage: --adls-gen1-folder-data-set account-name=XX folder-path=XX resource-group=XX subscription-id=XX \
+kind=XX
+
+ account-name: Required. The ADLS account name.
+ folder-path: Required. The folder path within the ADLS account.
+ resource-group: Required. Resource group of ADLS account.
+ subscription-id: Required. Subscription id of ADLS account.
+ kind: Required. Kind of data set.
+ - name: --adls-gen2-file-data-set
+ short-summary: "An ADLS Gen 2 file data set."
+ long-summary: |
+ Usage: --adls-gen2-file-data-set file-path=XX file-system=XX resource-group=XX storage-account-name=XX \
+subscription-id=XX kind=XX
+
+ file-path: Required. File path within the file system.
+ file-system: Required. File system to which the file belongs.
+ resource-group: Required. Resource group of storage account
+ storage-account-name: Required. Storage account name of the source data set
+ subscription-id: Required. Subscription id of storage account
+ kind: Required. Kind of data set.
+ - name: --adls-gen2-file-system-data-set
+ short-summary: "An ADLS Gen 2 file system data set."
+ long-summary: |
+ Usage: --adls-gen2-file-system-data-set file-system=XX resource-group=XX storage-account-name=XX \
+subscription-id=XX kind=XX
+
+ file-system: Required. The file system name.
+ resource-group: Required. Resource group of storage account
+ storage-account-name: Required. Storage account name of the source data set
+ subscription-id: Required. Subscription id of storage account
+ kind: Required. Kind of data set.
+ - name: --adls-gen2-folder-data-set
+ short-summary: "An ADLS Gen 2 folder data set."
+ long-summary: |
+ Usage: --adls-gen2-folder-data-set file-system=XX folder-path=XX resource-group=XX storage-account-name=XX \
+subscription-id=XX kind=XX
+
+ file-system: Required. File system to which the folder belongs.
+ folder-path: Required. Folder path within the file system.
+ resource-group: Required. Resource group of storage account
+ storage-account-name: Required. Storage account name of the source data set
+ subscription-id: Required. Subscription id of storage account
+ kind: Required. Kind of data set.
+ - name: --blob-container-data-set
+ short-summary: "An Azure storage blob container data set."
+ long-summary: |
+ Usage: --blob-container-data-set container-name=XX resource-group=XX storage-account-name=XX \
+subscription-id=XX kind=XX
+
+ container-name: Required. BLOB Container name.
+ resource-group: Required. Resource group of storage account
+ storage-account-name: Required. Storage account name of the source data set
+ subscription-id: Required. Subscription id of storage account
+ kind: Required. Kind of data set.
+ - name: --blob-data-set
+ short-summary: "An Azure storage blob data set."
+ long-summary: |
+ Usage: --blob-data-set container-name=XX file-path=XX resource-group=XX storage-account-name=XX \
+subscription-id=XX kind=XX
+
+ container-name: Required. Container that has the file path.
+ file-path: Required. File path within the source data set
+ resource-group: Required. Resource group of storage account
+ storage-account-name: Required. Storage account name of the source data set
+ subscription-id: Required. Subscription id of storage account
+ kind: Required. Kind of data set.
+ - name: --blob-folder-data-set
+ short-summary: "An Azure storage blob folder data set."
+ long-summary: |
+ Usage: --blob-folder-data-set container-name=XX prefix=XX resource-group=XX storage-account-name=XX \
+subscription-id=XX kind=XX
+
+ container-name: Required. Container that has the file path.
+ prefix: Required. Prefix for blob folder
+ resource-group: Required. Resource group of storage account
+ storage-account-name: Required. Storage account name of the source data set
+ subscription-id: Required. Subscription id of storage account
+ kind: Required. Kind of data set.
+ - name: --kusto-cluster-data-set
+ short-summary: "A kusto cluster data set."
+ long-summary: |
+ Usage: --kusto-cluster-data-set kusto-cluster-resource-id=XX kind=XX
+
+ kusto-cluster-resource-id: Required. Resource id of the kusto cluster.
+ kind: Required. Kind of data set.
+ - name: --kusto-database-data-set
+ short-summary: "A kusto database data set."
+ long-summary: |
+ Usage: --kusto-database-data-set kusto-database-resource-id=XX kind=XX
+
+ kusto-database-resource-id: Required. Resource id of the kusto database.
+ kind: Required. Kind of data set.
+ - name: --sqldb-table-data-set
+ short-summary: "A SQL DB table data set."
+ long-summary: |
+ Usage: --sqldb-table-data-set database-name=XX schema-name=XX sql-server-resource-id=XX table-name=XX \
+kind=XX
+
+ database-name: Database name of the source data set
+ schema-name: Schema of the table. Default value is dbo.
+ sql-server-resource-id: Resource id of SQL server
+ table-name: SQL DB table name.
+ kind: Required. Kind of data set.
+ - name: --sqldw-table-data-set
+ short-summary: "A SQL DW table data set."
+ long-summary: |
+ Usage: --sqldw-table-data-set data-warehouse-name=XX schema-name=XX sql-server-resource-id=XX \
+table-name=XX kind=XX
+
+ data-warehouse-name: DataWarehouse name of the source data set
+ schema-name: Schema of the table. Default value is dbo.
+ sql-server-resource-id: Resource id of SQL server
+ table-name: SQL DW table name.
+ kind: Required. Kind of data set.
+ - name: --synapse-workspace-sql-pool-table-data-set
+ short-summary: "A Synapse Workspace Sql Pool Table data set."
+ long-summary: |
+ Usage: --synapse-workspace-sql-pool-table-data-set synapse-workspace-sql-pool-table-resource-id=XX kind=XX
+
+ synapse-workspace-sql-pool-table-resource-id: Required. Resource id of the Synapse Workspace SQL Pool \
+Table
+ kind: Required. Kind of data set.
+ examples:
+ - name: DataSets_Create
+ text: |-
+ az datashare data-set create --account-name "Account1" --blob-data-set container-name="C1" \
+file-path="file21" resource-group="SampleResourceGroup" storage-account-name="storage2" subscription-id="433a8dfd-e5d5-\
+4e77-ad86-90acdc75eb1a" --name "Dataset1" --resource-group "SampleResourceGroup" --share-name "Share1"
+ - name: DataSets_KustoCluster_Create
+ text: |-
+ az datashare data-set create --account-name "Account1" --kusto-cluster-data-set \
+kusto-cluster-resource-id="/subscriptions/433a8dfd-e5d5-4e77-ad86-90acdc75eb1a/resourceGroups/SampleResourceGroup/provi\
+ders/Microsoft.Kusto/clusters/Cluster1" --name "Dataset1" --resource-group "SampleResourceGroup" --share-name "Share1"
+ - name: DataSets_KustoDatabase_Create
+ text: |-
+ az datashare data-set create --account-name "Account1" --kusto-database-data-set \
+kusto-database-resource-id="/subscriptions/433a8dfd-e5d5-4e77-ad86-90acdc75eb1a/resourceGroups/SampleResourceGroup/prov\
+iders/Microsoft.Kusto/clusters/Cluster1/databases/Database1" --name "Dataset1" --resource-group "SampleResourceGroup" \
+--share-name "Share1"
+ - name: DataSets_SqlDBTable_Create
+ text: |-
+ az datashare data-set create --account-name "Account1" --sqldb-table-data-set database-name="SqlDB1" \
+schema-name="dbo" sql-server-resource-id="/subscriptions/433a8dfd-e5d5-4e77-ad86-90acdc75eb1a/resourceGroups/SampleReso\
+urceGroup/providers/Microsoft.Sql/servers/Server1" table-name="Table1" --name "Dataset1" --resource-group \
+"SampleResourceGroup" --share-name "Share1"
+ - name: DataSets_SqlDWTable_Create
+ text: |-
+ az datashare data-set create --account-name "Account1" --sqldw-table-data-set \
+data-warehouse-name="DataWarehouse1" schema-name="dbo" sql-server-resource-id="/subscriptions/433a8dfd-e5d5-4e77-ad86-9\
+0acdc75eb1a/resourceGroups/SampleResourceGroup/providers/Microsoft.Sql/servers/Server1" table-name="Table1" --name \
+"Dataset1" --resource-group "SampleResourceGroup" --share-name "Share1"
+ - name: DataSets_SynapseWorkspaceSqlPoolTable_Create
+ text: |-
+ az datashare data-set create --account-name "sourceAccount" --synapse-workspace-sql-pool-table-data-set \
+synapse-workspace-sql-pool-table-resource-id="/subscriptions/0f3dcfc3-18f8-4099-b381-8353e19d43a7/resourceGroups/Sample\
+ResourceGroup/providers/Microsoft.Synapse/workspaces/ExampleWorkspace/sqlPools/ExampleSqlPool/schemas/dbo/tables/table1\
+" --name "dataset1" --resource-group "SampleResourceGroup" --share-name "share1"
+"""
+
+helps['datashare data-set delete'] = """
+ type: command
+ short-summary: "Delete a DataSet in a share."
+ examples:
+ - name: DataSets_Delete
+ text: |-
+ az datashare data-set delete --account-name "Account1" --name "Dataset1" --resource-group \
+"SampleResourceGroup" --share-name "Share1"
+"""
+
+helps['datashare data-set wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the datashare data-set is met.
+ examples:
+ - name: Pause executing next line of CLI script until the datashare data-set is successfully deleted.
+ text: |-
+ az datashare data-set wait --account-name "Account1" --name "Dataset1" --resource-group \
+"SampleResourceGroup" --share-name "Share1" --deleted
+"""
+
+helps['datashare data-set-mapping'] = """
+ type: group
+ short-summary: Manage data set mapping with datashare
+"""
+
+helps['datashare data-set-mapping list'] = """
+ type: command
+ short-summary: "List DataSetMappings in a share subscription."
+ examples:
+ - name: DataSetMappings_ListByShareSubscription
+ text: |-
+ az datashare data-set-mapping list --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1"
+"""
+
+helps['datashare data-set-mapping show'] = """
+ type: command
+ short-summary: "Get a DataSetMapping in a shareSubscription."
+ examples:
+ - name: DataSetMappings_Get
+ text: |-
+ az datashare data-set-mapping show --account-name "Account1" --name "DatasetMapping1" --resource-group \
+"SampleResourceGroup" --share-subscription-name "ShareSubscription1"
+"""
+
+helps['datashare data-set-mapping create'] = """
+ type: command
+ short-summary: "Create a DataSetMapping."
+ parameters:
+ - name: --adls-gen2-file-data-set-mapping
+ short-summary: "An ADLS Gen2 file data set mapping."
+ long-summary: |
+ Usage: --adls-gen2-file-data-set-mapping data-set-id=XX file-path=XX file-system=XX output-type=XX \
+resource-group=XX storage-account-name=XX subscription-id=XX kind=XX
+
+ data-set-id: Required. The id of the source data set.
+ file-path: Required. File path within the file system.
+ file-system: Required. File system to which the file belongs.
+ output-type: Type of output file
+ resource-group: Required. Resource group of storage account.
+ storage-account-name: Required. Storage account name of the source data set.
+ subscription-id: Required. Subscription id of storage account.
+ kind: Required. Kind of data set mapping.
+ - name: --adls-gen2-file-system-data-set-mapping
+ short-summary: "An ADLS Gen2 file system data set mapping."
+ long-summary: |
+ Usage: --adls-gen2-file-system-data-set-mapping data-set-id=XX file-system=XX resource-group=XX \
+storage-account-name=XX subscription-id=XX kind=XX
+
+ data-set-id: Required. The id of the source data set.
+ file-system: Required. The file system name.
+ resource-group: Required. Resource group of storage account.
+ storage-account-name: Required. Storage account name of the source data set.
+ subscription-id: Required. Subscription id of storage account.
+ kind: Required. Kind of data set mapping.
+ - name: --adls-gen2-folder-data-set-mapping
+ short-summary: "An ADLS Gen2 folder data set mapping."
+ long-summary: |
+ Usage: --adls-gen2-folder-data-set-mapping data-set-id=XX file-system=XX folder-path=XX resource-group=XX \
+storage-account-name=XX subscription-id=XX kind=XX
+
+ data-set-id: Required. The id of the source data set.
+ file-system: Required. File system to which the folder belongs.
+ folder-path: Required. Folder path within the file system.
+ resource-group: Required. Resource group of storage account.
+ storage-account-name: Required. Storage account name of the source data set.
+ subscription-id: Required. Subscription id of storage account.
+ kind: Required. Kind of data set mapping.
+ - name: --blob-container-data-set-mapping
+ short-summary: "A Blob container data set mapping."
+ long-summary: |
+ Usage: --blob-container-data-set-mapping container-name=XX data-set-id=XX resource-group=XX \
+storage-account-name=XX subscription-id=XX kind=XX
+
+ container-name: Required. BLOB Container name.
+ data-set-id: Required. The id of the source data set.
+ resource-group: Required. Resource group of storage account.
+ storage-account-name: Required. Storage account name of the source data set.
+ subscription-id: Required. Subscription id of storage account.
+ kind: Required. Kind of data set mapping.
+ - name: --blob-data-set-mapping
+ short-summary: "A Blob data set mapping."
+ long-summary: |
+ Usage: --blob-data-set-mapping container-name=XX data-set-id=XX file-path=XX output-type=XX \
+resource-group=XX storage-account-name=XX subscription-id=XX kind=XX
+
+ container-name: Required. Container that has the file path.
+ data-set-id: Required. The id of the source data set.
+ file-path: Required. File path within the source data set
+ output-type: File output type
+ resource-group: Required. Resource group of storage account.
+ storage-account-name: Required. Storage account name of the source data set.
+ subscription-id: Required. Subscription id of storage account.
+ kind: Required. Kind of data set mapping.
+ - name: --blob-folder-data-set-mapping
+ short-summary: "A Blob folder data set mapping."
+ long-summary: |
+ Usage: --blob-folder-data-set-mapping container-name=XX data-set-id=XX prefix=XX resource-group=XX \
+storage-account-name=XX subscription-id=XX kind=XX
+
+ container-name: Required. Container that has the file path.
+ data-set-id: Required. The id of the source data set.
+ prefix: Required. Prefix for blob folder
+ resource-group: Required. Resource group of storage account.
+ storage-account-name: Required. Storage account name of the source data set.
+ subscription-id: Required. Subscription id of storage account.
+ kind: Required. Kind of data set mapping.
+ - name: --kusto-cluster-data-set-mapping
+ short-summary: "A Kusto cluster data set mapping"
+ long-summary: |
+ Usage: --kusto-cluster-data-set-mapping data-set-id=XX kusto-cluster-resource-id=XX kind=XX
+
+ data-set-id: Required. The id of the source data set.
+ kusto-cluster-resource-id: Required. Resource id of the sink kusto cluster.
+ kind: Required. Kind of data set mapping.
+ - name: --kusto-database-data-set-mapping
+ short-summary: "A Kusto database data set mapping"
+ long-summary: |
+ Usage: --kusto-database-data-set-mapping data-set-id=XX kusto-cluster-resource-id=XX kind=XX
+
+ data-set-id: Required. The id of the source data set.
+ kusto-cluster-resource-id: Required. Resource id of the sink kusto cluster.
+ kind: Required. Kind of data set mapping.
+ - name: --sqldb-table-data-set-mapping
+ short-summary: "A SQL DB Table data set mapping."
+ long-summary: |
+ Usage: --sqldb-table-data-set-mapping database-name=XX data-set-id=XX schema-name=XX \
+sql-server-resource-id=XX table-name=XX kind=XX
+
+ database-name: Required. DatabaseName name of the sink data set
+ data-set-id: Required. The id of the source data set.
+ schema-name: Required. Schema of the table. Default value is dbo.
+ sql-server-resource-id: Required. Resource id of SQL server
+ table-name: Required. SQL DB table name.
+ kind: Required. Kind of data set mapping.
+ - name: --sqldw-table-data-set-mapping
+ short-summary: "A SQL DW Table data set mapping."
+ long-summary: |
+ Usage: --sqldw-table-data-set-mapping data-set-id=XX data-warehouse-name=XX schema-name=XX \
+sql-server-resource-id=XX table-name=XX kind=XX
+
+ data-set-id: Required. The id of the source data set.
+ data-warehouse-name: Required. DataWarehouse name of the source data set
+ schema-name: Required. Schema of the table. Default value is dbo.
+ sql-server-resource-id: Required. Resource id of SQL server
+ table-name: Required. SQL DW table name.
+ kind: Required. Kind of data set mapping.
+ - name: --synapse-workspace-sql-pool-table-data-set-mapping
+ short-summary: "A Synapse Workspace Sql Pool Table data set mapping"
+ long-summary: |
+ Usage: --synapse-workspace-sql-pool-table-data-set-mapping data-set-id=XX synapse-workspace-sql-pool-table-\
+resource-id=XX kind=XX
+
+ data-set-id: Required. The id of the source data set.
+ synapse-workspace-sql-pool-table-resource-id: Required. Resource id of the Synapse Workspace SQL Pool \
+Table
+ kind: Required. Kind of data set mapping.
+ examples:
+ - name: DataSetMappings_Create
+ text: |-
+ az datashare data-set-mapping create --account-name "Account1" --blob-data-set-mapping \
+container-name="C1" data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" file-path="file21" \
+resource-group="SampleResourceGroup" storage-account-name="storage2" subscription-id="433a8dfd-e5d5-4e77-ad86-90acdc75e\
+b1a" --name "DatasetMapping1" --resource-group "SampleResourceGroup" --share-subscription-name "ShareSubscription1"
+ - name: DataSetMappings_SqlDB_Create
+ text: |-
+ az datashare data-set-mapping create --account-name "Account1" --sqldb-table-data-set-mapping \
+database-name="Database1" data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" schema-name="dbo" \
+sql-server-resource-id="/subscriptions/433a8dfd-e5d5-4e77-ad86-90acdc75eb1a/resourceGroups/SampleResourceGroup/provider\
+s/Microsoft.Sql/servers/Server1" table-name="Table1" --name "DatasetMapping1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1"
+ - name: DataSetMappings_SqlDWDataSetToAdlsGen2File_Create
+ text: |-
+ az datashare data-set-mapping create --account-name "Account1" --adls-gen2-file-data-set-mapping \
+data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" file-path="file21" file-system="fileSystem" output-type="Csv" \
+resource-group="SampleResourceGroup" storage-account-name="storage2" subscription-id="433a8dfd-e5d5-4e77-ad86-90acdc75e\
+b1a" --name "DatasetMapping1" --resource-group "SampleResourceGroup" --share-subscription-name "ShareSubscription1"
+ - name: DataSetMappings_SqlDW_Create
+ text: |-
+ az datashare data-set-mapping create --account-name "Account1" --sqldw-table-data-set-mapping \
+data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" data-warehouse-name="DataWarehouse1" schema-name="dbo" \
+sql-server-resource-id="/subscriptions/433a8dfd-e5d5-4e77-ad86-90acdc75eb1a/resourceGroups/SampleResourceGroup/provider\
+s/Microsoft.Sql/servers/Server1" table-name="Table1" --name "DatasetMapping1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1"
+ - name: DataSetMappings_SynapseWorkspaceSqlPoolTable_Create
+ text: |-
+ az datashare data-set-mapping create --account-name "consumerAccount" --synapse-workspace-sql-pool-table\
+-data-set-mapping data-set-id="3dc64e49-1fc3-4186-b3dc-d388c4d3076a" synapse-workspace-sql-pool-table-resource-id="/sub\
+scriptions/0f3dcfc3-18f8-4099-b381-8353e19d43a7/resourceGroups/SampleResourceGroup/providers/Microsoft.Synapse/workspac\
+es/ExampleWorkspace/sqlPools/ExampleSqlPool/schemas/dbo/tables/table1" --name "datasetMappingName1" --resource-group \
+"SampleResourceGroup" --share-subscription-name "ShareSubscription1"
+"""
+
+helps['datashare data-set-mapping delete'] = """
+ type: command
+ short-summary: "Delete a DataSetMapping in a shareSubscription."
+ examples:
+ - name: DataSetMappings_Delete
+ text: |-
+ az datashare data-set-mapping delete --account-name "Account1" --name "DatasetMapping1" \
+--resource-group "SampleResourceGroup" --share-subscription-name "ShareSubscription1"
+"""
+
+helps['datashare invitation'] = """
+ type: group
+ short-summary: Manage invitation with datashare
+"""
+
+helps['datashare invitation list'] = """
+ type: command
+ short-summary: "List invitations in a share."
+ examples:
+ - name: Invitations_ListByShare
+ text: |-
+ az datashare invitation list --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-name "Share1"
+"""
+
+helps['datashare invitation show'] = """
+ type: command
+ short-summary: "Get an invitation in a share."
+ examples:
+ - name: Invitations_Get
+ text: |-
+ az datashare invitation show --account-name "Account1" --name "Invitation1" --resource-group \
+"SampleResourceGroup" --share-name "Share1"
+"""
+
+helps['datashare invitation create'] = """
+ type: command
+ short-summary: "Create an invitation."
+ examples:
+ - name: Invitations_Create
+ text: |-
+ az datashare invitation create --account-name "Account1" --expiration-date \
+"2020-08-26T22:33:24.5785265Z" --target-email "receiver@microsoft.com" --name "Invitation1" --resource-group \
+"SampleResourceGroup" --share-name "Share1"
+"""
+
+helps['datashare invitation delete'] = """
+ type: command
+ short-summary: "Delete an invitation in a share."
+ examples:
+ - name: Invitations_Delete
+ text: |-
+ az datashare invitation delete --account-name "Account1" --name "Invitation1" --resource-group \
+"SampleResourceGroup" --share-name "Share1"
+"""
+
+helps['datashare'] = """
+ type: group
+ short-summary: Manage share with datashare
+"""
+
+helps['datashare list'] = """
+ type: command
+ short-summary: "List shares in an account."
+ examples:
+ - name: Shares_ListByAccount
+ text: |-
+ az datashare list --account-name "Account1" --resource-group "SampleResourceGroup"
+"""
+
+helps['datashare show'] = """
+ type: command
+ short-summary: "Get a share."
+ examples:
+ - name: Shares_Get
+ text: |-
+ az datashare show --account-name "Account1" --resource-group "SampleResourceGroup" --name "Share1"
+"""
+
+helps['datashare create'] = """
+ type: command
+ short-summary: "Create a share."
+ examples:
+ - name: Shares_Create
+ text: |-
+ az datashare create --account-name "Account1" --resource-group "SampleResourceGroup" --description \
+"share description" --share-kind "CopyBased" --terms "Confidential" --name "Share1"
+"""
+
+helps['datashare delete'] = """
+ type: command
+ short-summary: "Delete a share."
+ examples:
+ - name: Shares_Delete
+ text: |-
+ az datashare delete --account-name "Account1" --resource-group "SampleResourceGroup" --name "Share1"
+"""
+
+helps['datashare list-synchronization'] = """
+ type: command
+ short-summary: "List synchronizations of a share."
+ examples:
+ - name: Shares_ListSynchronizations
+ text: |-
+ az datashare list-synchronization --account-name "Account1" --resource-group "SampleResourceGroup" \
+--name "Share1"
+"""
+
+helps['datashare list-synchronization-detail'] = """
+ type: command
+ short-summary: "List synchronization details."
+ examples:
+ - name: Shares_ListSynchronizationDetails
+ text: |-
+ az datashare list-synchronization-detail --account-name "Account1" --resource-group \
+"SampleResourceGroup" --name "Share1" --synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb"
+"""
+
+helps['datashare wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the datashare is met.
+ examples:
+ - name: Pause executing next line of CLI script until the datashare is successfully deleted.
+ text: |-
+ az datashare wait --account-name "Account1" --resource-group "SampleResourceGroup" --name "Share1" \
+--deleted
+"""
+
+helps['datashare provider-share-subscription'] = """
+ type: group
+ short-summary: Manage provider share subscription with datashare
+"""
+
+helps['datashare provider-share-subscription list'] = """
+ type: command
+ short-summary: "List share subscriptions in a provider share."
+ examples:
+ - name: ProviderShareSubscriptions_ListByShare
+ text: |-
+ az datashare provider-share-subscription list --account-name "Account1" --resource-group \
+"SampleResourceGroup" --share-name "Share1"
+"""
+
+helps['datashare provider-share-subscription show'] = """
+ type: command
+ short-summary: "Get share subscription in a provider share."
+ examples:
+ - name: ProviderShareSubscriptions_GetByShare
+ text: |-
+ az datashare provider-share-subscription show --account-name "Account1" --provider-share-subscription-id\
+ "4256e2cf-0f82-4865-961b-12f83333f487" --resource-group "SampleResourceGroup" --share-name "Share1"
+"""
+
+helps['datashare provider-share-subscription adjust'] = """
+ type: command
+ short-summary: "Adjust a share subscription's expiration date in a provider share."
+ examples:
+ - name: ProviderShareSubscriptions_Adjust
+ text: |-
+ az datashare provider-share-subscription adjust --account-name "Account1" --expiration-date \
+"2020-12-26T22:33:24.5785265Z" --provider-share-subscription-id "4256e2cf-0f82-4865-961b-12f83333f487" \
+--resource-group "SampleResourceGroup" --share-name "Share1"
+"""
+
+helps['datashare provider-share-subscription reinstate'] = """
+ type: command
+ short-summary: "Reinstate share subscription in a provider share."
+ examples:
+ - name: ProviderShareSubscriptions_Reinstate
+ text: |-
+ az datashare provider-share-subscription reinstate --account-name "Account1" --expiration-date \
+"2020-12-26T22:33:24.5785265Z" --provider-share-subscription-id "4256e2cf-0f82-4865-961b-12f83333f487" \
+--resource-group "SampleResourceGroup" --share-name "Share1"
+"""
+
+helps['datashare provider-share-subscription revoke'] = """
+ type: command
+ short-summary: "Revoke share subscription in a provider share."
+ examples:
+ - name: ProviderShareSubscriptions_Revoke
+ text: |-
+ az datashare provider-share-subscription revoke --account-name "Account1" \
+--provider-share-subscription-id "4256e2cf-0f82-4865-961b-12f83333f487" --resource-group "SampleResourceGroup" \
+--share-name "Share1"
+"""
+
+helps['datashare provider-share-subscription wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the datashare provider-share-subscription is \
+met.
+ examples:
+ - name: Pause executing next line of CLI script until the datashare provider-share-subscription is successfully \
+created.
+ text: |-
+ az datashare provider-share-subscription wait --account-name "Account1" --provider-share-subscription-id\
+ "4256e2cf-0f82-4865-961b-12f83333f487" --resource-group "SampleResourceGroup" --share-name "Share1" --created
+"""
+
+helps['datashare share-subscription'] = """
+ type: group
+ short-summary: Manage share subscription with datashare
+"""
+
+helps['datashare share-subscription list'] = """
+ type: command
+ short-summary: "List share subscriptions in an account."
+ examples:
+ - name: ShareSubscriptions_ListByAccount
+ text: |-
+ az datashare share-subscription list --account-name "Account1" --resource-group "SampleResourceGroup"
+"""
+
+helps['datashare share-subscription show'] = """
+ type: command
+ short-summary: "Get a shareSubscription in an account."
+ examples:
+ - name: ShareSubscriptions_Get
+ text: |-
+ az datashare share-subscription show --account-name "Account1" --resource-group "SampleResourceGroup" \
+--name "ShareSubscription1"
+"""
+
+helps['datashare share-subscription create'] = """
+ type: command
+ short-summary: "Create a shareSubscription in an account."
+ examples:
+ - name: ShareSubscriptions_Create
+ text: |-
+ az datashare share-subscription create --account-name "Account1" --resource-group "SampleResourceGroup" \
+--expiration-date "2020-08-26T22:33:24.5785265Z" --invitation-id "12345678-1234-1234-12345678abd" \
+--source-share-location "eastus2" --name "ShareSubscription1"
+"""
+
+helps['datashare share-subscription delete'] = """
+ type: command
+ short-summary: "Delete a shareSubscription in an account."
+ examples:
+ - name: ShareSubscriptions_Delete
+ text: |-
+ az datashare share-subscription delete --account-name "Account1" --resource-group "SampleResourceGroup" \
+--name "ShareSubscription1"
+"""
+
+helps['datashare share-subscription cancel-synchronization'] = """
+ type: command
+ short-summary: "Request to cancel a synchronization."
+ examples:
+ - name: ShareSubscriptions_CancelSynchronization
+ text: |-
+ az datashare share-subscription cancel-synchronization --account-name "Account1" --resource-group \
+"SampleResourceGroup" --name "ShareSubscription1" --synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb"
+"""
+
+helps['datashare share-subscription list-source-share-synchronization-setting'] = """
+ type: command
+ short-summary: "Get synchronization settings set on a share."
+ examples:
+ - name: ShareSubscriptions_ListSourceShareSynchronizationSettings
+ text: |-
+ az datashare share-subscription list-source-share-synchronization-setting --account-name "Account1" \
+--resource-group "SampleResourceGroup" --name "ShareSub1"
+"""
+
+helps['datashare share-subscription list-synchronization'] = """
+ type: command
+ short-summary: "List synchronizations of a share subscription."
+ examples:
+ - name: ShareSubscriptions_ListSynchronizations
+ text: |-
+ az datashare share-subscription list-synchronization --account-name "Account1" --resource-group \
+"SampleResourceGroup" --name "ShareSub1"
+"""
+
+helps['datashare share-subscription list-synchronization-detail'] = """
+ type: command
+ short-summary: "List synchronization details."
+ examples:
+ - name: ShareSubscriptions_ListSynchronizationDetails
+ text: |-
+ az datashare share-subscription list-synchronization-detail --account-name "Account1" --resource-group \
+"SampleResourceGroup" --name "ShareSub1" --synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb"
+"""
+
+helps['datashare share-subscription synchronize'] = """
+ type: command
+ short-summary: "Initiate a copy."
+ examples:
+ - name: ShareSubscriptions_Synchronize
+ text: |-
+ az datashare share-subscription synchronize --account-name "Account1" --resource-group \
+"SampleResourceGroup" --name "ShareSubscription1" --synchronization-mode "Incremental"
+"""
+
+helps['datashare share-subscription wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the datashare share-subscription is met.
+ examples:
+ - name: Pause executing next line of CLI script until the datashare share-subscription is successfully deleted.
+ text: |-
+ az datashare share-subscription wait --account-name "Account1" --resource-group "SampleResourceGroup" \
+--name "ShareSubscription1" --deleted
+ - name: Pause executing next line of CLI script until the datashare share-subscription is successfully created.
+ text: |-
+ az datashare share-subscription wait --account-name "Account1" --resource-group "SampleResourceGroup" \
+--name "ShareSubscription1" --created
+"""
+
+helps['datashare consumer-source-data-set'] = """
+ type: group
+ short-summary: Manage consumer source data set with datashare
+"""
+
+helps['datashare consumer-source-data-set list'] = """
+ type: command
+ short-summary: "Get source dataSets of a shareSubscription."
+ examples:
+ - name: ConsumerSourceDataSets_ListByShareSubscription
+ text: |-
+ az datashare consumer-source-data-set list --account-name "Account1" --resource-group \
+"SampleResourceGroup" --share-subscription-name "Share1"
+"""
+
+helps['datashare synchronization-setting'] = """
+ type: group
+ short-summary: Manage synchronization setting with datashare
+"""
+
+helps['datashare synchronization-setting list'] = """
+ type: command
+ short-summary: "List synchronizationSettings in a share."
+ examples:
+ - name: SynchronizationSettings_ListByShare
+ text: |-
+ az datashare synchronization-setting list --account-name "Account1" --resource-group \
+"SampleResourceGroup" --share-name "Share1"
+"""
+
+helps['datashare synchronization-setting show'] = """
+ type: command
+ short-summary: "Get a synchronizationSetting in a share."
+ examples:
+ - name: SynchronizationSettings_Get
+ text: |-
+ az datashare synchronization-setting show --account-name "Account1" --resource-group \
+"SampleResourceGroup" --share-name "Share1" --name "SynchronizationSetting1"
+"""
+
+helps['datashare synchronization-setting create'] = """
+ type: command
+ short-summary: "Create a synchronizationSetting."
+ parameters:
+ - name: --scheduled-synchronization-setting
+ short-summary: "A type of synchronization setting based on schedule"
+ long-summary: |
+ Usage: --scheduled-synchronization-setting recurrence-interval=XX synchronization-time=XX kind=XX
+
+ recurrence-interval: Required. Recurrence Interval
+ synchronization-time: Required. Synchronization time
+ kind: Required. Kind of synchronization setting.
+ examples:
+ - name: SynchronizationSettings_Create
+ text: |-
+ az datashare synchronization-setting create --account-name "Account1" --resource-group \
+"SampleResourceGroup" --share-name "Share1" --scheduled-synchronization-setting recurrence-interval="Day" \
+synchronization-time="2018-11-14T04:47:52.9614956Z" --name "Dataset1"
+"""
+
+helps['datashare synchronization-setting delete'] = """
+ type: command
+ short-summary: "Delete a synchronizationSetting in a share."
+ examples:
+ - name: SynchronizationSettings_Delete
+ text: |-
+ az datashare synchronization-setting delete --account-name "Account1" --resource-group \
+"SampleResourceGroup" --share-name "Share1" --name "SynchronizationSetting1"
+"""
+
+helps['datashare synchronization-setting wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the datashare synchronization-setting is met.
+ examples:
+ - name: Pause executing next line of CLI script until the datashare synchronization-setting is successfully \
+deleted.
+ text: |-
+ az datashare synchronization-setting wait --account-name "Account1" --resource-group \
+"SampleResourceGroup" --share-name "Share1" --name "SynchronizationSetting1" --deleted
+"""
+
+helps['datashare trigger'] = """
+ type: group
+ short-summary: Manage trigger with datashare
+"""
+
+helps['datashare trigger list'] = """
+ type: command
+ short-summary: "List Triggers in a share subscription."
+ examples:
+ - name: Triggers_ListByShareSubscription
+ text: |-
+ az datashare trigger list --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1"
+"""
+
+helps['datashare trigger show'] = """
+ type: command
+ short-summary: "Get a Trigger in a shareSubscription."
+ examples:
+ - name: Triggers_Get
+ text: |-
+ az datashare trigger show --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1" --name "Trigger1"
+"""
+
+helps['datashare trigger create'] = """
+ type: command
+ short-summary: "Create a Trigger."
+ parameters:
+ - name: --scheduled-trigger
+ short-summary: "A type of trigger based on schedule"
+ long-summary: |
+ Usage: --scheduled-trigger recurrence-interval=XX synchronization-mode=XX synchronization-time=XX kind=XX
+
+ recurrence-interval: Required. Recurrence Interval
+ synchronization-mode: Synchronization mode
+ synchronization-time: Required. Synchronization time
+ kind: Required. Kind of synchronization on trigger.
+ examples:
+ - name: Triggers_Create
+ text: |-
+ az datashare trigger create --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1" --scheduled-trigger recurrence-interval="Day" \
+synchronization-mode="Incremental" synchronization-time="2018-11-14T04:47:52.9614956Z" --name "Trigger1"
+"""
+
+helps['datashare trigger delete'] = """
+ type: command
+ short-summary: "Delete a Trigger in a shareSubscription."
+ examples:
+ - name: Triggers_Delete
+ text: |-
+ az datashare trigger delete --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1" --name "Trigger1"
+"""
+
+helps['datashare trigger wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the datashare trigger is met.
+ examples:
+ - name: Pause executing next line of CLI script until the datashare trigger is successfully created.
+ text: |-
+ az datashare trigger wait --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1" --name "Trigger1" --created
+ - name: Pause executing next line of CLI script until the datashare trigger is successfully deleted.
+ text: |-
+ az datashare trigger wait --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1" --name "Trigger1" --deleted
+"""
diff --git a/src/datashare/azext_datashare/generated/_params.py b/src/datashare/azext_datashare/generated/_params.py
new file mode 100644
index 00000000000..f5c903f5131
--- /dev/null
+++ b/src/datashare/azext_datashare/generated/_params.py
@@ -0,0 +1,509 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+# pylint: disable=too-many-statements
+
+from azure.cli.core.commands.parameters import (
+ tags_type,
+ get_enum_type,
+ resource_group_name_type,
+ get_location_type
+)
+from azure.cli.core.commands.validators import get_default_location_from_resource_group
+from azext_datashare.action import (
+ AddAdlsGen1FileDataSet,
+ AddAdlsGen1FolderDataSet,
+ AddAdlsGen2FileDataSet,
+ AddAdlsGen2FileSystemDataSet,
+ AddAdlsGen2FolderDataSet,
+ AddBlobContainerDataSet,
+ AddBlobDataSet,
+ AddBlobFolderDataSet,
+ AddKustoClusterDataSet,
+ AddKustoDatabaseDataSet,
+ AddSqldbTableDataSet,
+ AddSqldwTableDataSet,
+ AddSynapseWorkspaceSqlPoolTableDataSet,
+ AddAdlsGen2FileDataSetMapping,
+ AddAdlsGen2FileSystemDataSetMapping,
+ AddAdlsGen2FolderDataSetMapping,
+ AddBlobContainerDataSetMapping,
+ AddBlobDataSetMapping,
+ AddBlobFolderDataSetMapping,
+ AddKustoClusterDataSetMapping,
+ AddKustoDatabaseDataSetMapping,
+ AddSqldbTableDataSetMapping,
+ AddSqldwTableDataSetMapping,
+ AddSynapseWorkspaceSqlPoolTableDataSetMapping,
+ AddScheduledSynchronizationSetting,
+ AddScheduledTrigger
+)
+
+
+def load_arguments(self, _):
+
+ with self.argument_context('datashare account list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('skip_token', type=str, help='Continuation token')
+
+ with self.argument_context('datashare account show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', options_list=['--name', '-n', '--account-name'], type=str, help='The name of the '
+ 'share account.', id_part='name')
+
+ with self.argument_context('datashare account create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', options_list=['--name', '-n', '--account-name'], type=str, help='The name of the '
+ 'share account.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+
+ with self.argument_context('datashare account update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', options_list=['--name', '-n', '--account-name'], type=str, help='The name of the '
+ 'share account.', id_part='name')
+ c.argument('tags', tags_type)
+
+ with self.argument_context('datashare account delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', options_list=['--name', '-n', '--account-name'], type=str, help='The name of the '
+ 'share account.', id_part='name')
+
+ with self.argument_context('datashare account wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', options_list=['--name', '-n', '--account-name'], type=str, help='The name of the '
+ 'share account.', id_part='name')
+
+ with self.argument_context('datashare consumer-invitation show') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+ c.argument('invitation_id', type=str, help='An invitation id')
+
+ with self.argument_context('datashare consumer-invitation list-invitation') as c:
+ c.argument('skip_token', type=str, help='The continuation token')
+
+ with self.argument_context('datashare consumer-invitation reject-invitation') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+ c.argument('invitation_id', type=str, help='Unique id of the invitation.')
+
+ with self.argument_context('datashare data-set list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_name', type=str, help='The name of the share.')
+ c.argument('skip_token', type=str, help='continuation token')
+ c.argument('filter_', options_list=['--filter'], type=str, help='Filters the results using OData syntax.')
+ c.argument('orderby', type=str, help='Sorts the results using OData syntax.')
+
+ with self.argument_context('datashare data-set show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('data_set_name', options_list=['--name', '-n', '--data-set-name'], type=str, help='The name of the '
+ 'dataSet.', id_part='child_name_2')
+
+ with self.argument_context('datashare data-set create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_name', type=str, help='The name of the share to add the data set to.')
+ c.argument('data_set_name', options_list=['--name', '-n', '--data-set-name'], type=str, help='The name of the '
+ 'dataSet.')
+ c.argument('adls_gen1_file_data_set', action=AddAdlsGen1FileDataSet, nargs='+', help='An ADLS Gen 1 file data '
+ 'set.', arg_group='DataSet')
+ c.argument('adls_gen1_folder_data_set', action=AddAdlsGen1FolderDataSet, nargs='+', help='An ADLS Gen 1 folder '
+ 'data set.', arg_group='DataSet')
+ c.argument('adls_gen2_file_data_set', action=AddAdlsGen2FileDataSet, nargs='+', help='An ADLS Gen 2 file data '
+ 'set.', arg_group='DataSet')
+ c.argument('adls_gen2_file_system_data_set', action=AddAdlsGen2FileSystemDataSet, nargs='+', help='An ADLS Gen '
+ '2 file system data set.', arg_group='DataSet')
+ c.argument('adls_gen2_folder_data_set', action=AddAdlsGen2FolderDataSet, nargs='+', help='An ADLS Gen 2 folder '
+ 'data set.', arg_group='DataSet')
+ c.argument('blob_container_data_set', action=AddBlobContainerDataSet, nargs='+', help='An Azure storage blob '
+ 'container data set.', arg_group='DataSet')
+ c.argument('blob_data_set', action=AddBlobDataSet, nargs='+', help='An Azure storage blob data set.',
+ arg_group='DataSet')
+ c.argument('blob_folder_data_set', action=AddBlobFolderDataSet, nargs='+', help='An Azure storage blob folder '
+ 'data set.', arg_group='DataSet')
+ c.argument('kusto_cluster_data_set', action=AddKustoClusterDataSet, nargs='+',
+ help='A kusto cluster data set.', arg_group='DataSet')
+ c.argument('kusto_database_data_set', action=AddKustoDatabaseDataSet, nargs='+', help='A kusto database data '
+ 'set.', arg_group='DataSet')
+ c.argument('sqldb_table_data_set', action=AddSqldbTableDataSet, nargs='+', help='A SQL DB table data set.',
+ arg_group='DataSet')
+ c.argument('sqldw_table_data_set', action=AddSqldwTableDataSet, nargs='+', help='A SQL DW table data set.',
+ arg_group='DataSet')
+ c.argument('synapse_workspace_sql_pool_table_data_set', action=AddSynapseWorkspaceSqlPoolTableDataSet,
+ nargs='+', help='A Synapse Workspace Sql Pool Table data set.', arg_group='DataSet')
+
+ with self.argument_context('datashare data-set delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('data_set_name', options_list=['--name', '-n', '--data-set-name'], type=str, help='The name of the '
+ 'dataSet.', id_part='child_name_2')
+
+ with self.argument_context('datashare data-set wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('data_set_name', options_list=['--name', '-n', '--data-set-name'], type=str, help='The name of the '
+ 'dataSet.', id_part='child_name_2')
+
+ with self.argument_context('datashare data-set-mapping list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_subscription_name', type=str, help='The name of the share subscription.')
+ c.argument('skip_token', type=str, help='Continuation token')
+ c.argument('filter_', options_list=['--filter'], type=str, help='Filters the results using OData syntax.')
+ c.argument('orderby', type=str, help='Sorts the results using OData syntax.')
+
+ with self.argument_context('datashare data-set-mapping show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_subscription_name', type=str, help='The name of the shareSubscription.',
+ id_part='child_name_1')
+ c.argument('data_set_mapping_name', options_list=['--name', '-n', '--data-set-mapping-name'], type=str,
+ help='The name of the dataSetMapping.', id_part='child_name_2')
+
+ with self.argument_context('datashare data-set-mapping create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_subscription_name', type=str, help='The name of the share subscription which will hold the '
+ 'data set sink.')
+ c.argument('data_set_mapping_name', options_list=['--name', '-n', '--data-set-mapping-name'], type=str,
+ help='The name of the data set mapping to be created.')
+ c.argument('adls_gen2_file_data_set_mapping', action=AddAdlsGen2FileDataSetMapping, nargs='+', help='An ADLS '
+ 'Gen2 file data set mapping.', arg_group='DataSetMapping')
+ c.argument('adls_gen2_file_system_data_set_mapping', action=AddAdlsGen2FileSystemDataSetMapping, nargs='+',
+ help='An ADLS Gen2 file system data set mapping.', arg_group='DataSetMapping')
+ c.argument('adls_gen2_folder_data_set_mapping', action=AddAdlsGen2FolderDataSetMapping, nargs='+', help='An '
+ 'ADLS Gen2 folder data set mapping.', arg_group='DataSetMapping')
+ c.argument('blob_container_data_set_mapping', action=AddBlobContainerDataSetMapping, nargs='+', help='A Blob '
+ 'container data set mapping.', arg_group='DataSetMapping')
+ c.argument('blob_data_set_mapping', action=AddBlobDataSetMapping, nargs='+', help='A Blob data set mapping.',
+ arg_group='DataSetMapping')
+ c.argument('blob_folder_data_set_mapping', action=AddBlobFolderDataSetMapping, nargs='+', help='A Blob folder '
+ 'data set mapping.', arg_group='DataSetMapping')
+ c.argument('kusto_cluster_data_set_mapping', action=AddKustoClusterDataSetMapping, nargs='+', help='A Kusto '
+                   'cluster data set mapping.', arg_group='DataSetMapping')
+ c.argument('kusto_database_data_set_mapping', action=AddKustoDatabaseDataSetMapping, nargs='+', help='A Kusto '
+                   'database data set mapping.', arg_group='DataSetMapping')
+ c.argument('sqldb_table_data_set_mapping', action=AddSqldbTableDataSetMapping, nargs='+', help='A SQL DB Table '
+ 'data set mapping.', arg_group='DataSetMapping')
+ c.argument('sqldw_table_data_set_mapping', action=AddSqldwTableDataSetMapping, nargs='+', help='A SQL DW Table '
+ 'data set mapping.', arg_group='DataSetMapping')
+ c.argument('synapse_workspace_sql_pool_table_data_set_mapping',
+ action=AddSynapseWorkspaceSqlPoolTableDataSetMapping, nargs='+', help='A Synapse Workspace Sql Pool '
+                   'Table data set mapping.', arg_group='DataSetMapping')
+
+ with self.argument_context('datashare data-set-mapping delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_subscription_name', type=str, help='The name of the shareSubscription.',
+ id_part='child_name_1')
+ c.argument('data_set_mapping_name', options_list=['--name', '-n', '--data-set-mapping-name'], type=str,
+ help='The name of the dataSetMapping.', id_part='child_name_2')
+
+ with self.argument_context('datashare invitation list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_name', type=str, help='The name of the share.')
+ c.argument('skip_token', type=str, help='The continuation token')
+ c.argument('filter_', options_list=['--filter'], type=str, help='Filters the results using OData syntax.')
+ c.argument('orderby', type=str, help='Sorts the results using OData syntax.')
+
+ with self.argument_context('datashare invitation show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('invitation_name', options_list=['--name', '-n', '--invitation-name'], type=str, help='The name of '
+ 'the invitation.', id_part='child_name_2')
+
+ with self.argument_context('datashare invitation create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_name', type=str, help='The name of the share to send the invitation for.')
+ c.argument('invitation_name', options_list=['--name', '-n', '--invitation-name'], type=str, help='The name of '
+ 'the invitation.')
+ c.argument('expiration_date', help='The expiration date for the invitation and share subscription.')
+ c.argument('target_active_directory_id', type=str, help='The target Azure AD Id. Can\'t be combined with '
+ 'email.')
+ c.argument('target_email', type=str, help='The email the invitation is directed to.')
+        c.argument('target_object_id', type=str, help='The target user or application Id that the invitation is '
+                   'being sent to. Must be specified along with TargetActiveDirectoryId. This enables sending '
+                   'invitations to specific users or applications in an AD tenant.')
+
+ with self.argument_context('datashare invitation delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('invitation_name', options_list=['--name', '-n', '--invitation-name'], type=str, help='The name of '
+ 'the invitation.', id_part='child_name_2')
+
+ with self.argument_context('datashare list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('skip_token', type=str, help='Continuation Token')
+ c.argument('filter_', options_list=['--filter'], type=str, help='Filters the results using OData syntax.')
+ c.argument('orderby', type=str, help='Sorts the results using OData syntax.')
+
+ with self.argument_context('datashare show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', options_list=['--name', '-n', '--share-name'], type=str, help='The name of the share '
+ 'to retrieve.', id_part='child_name_1')
+
+ with self.argument_context('datashare create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_name', options_list=['--name', '-n', '--share-name'], type=str,
+ help='The name of the share.')
+ c.argument('description', type=str, help='Share description.')
+ c.argument('share_kind', arg_type=get_enum_type(['CopyBased', 'InPlace']), help='Share kind.')
+ c.argument('terms', type=str, help='Share terms.')
+
+ with self.argument_context('datashare delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', options_list=['--name', '-n', '--share-name'], type=str,
+ help='The name of the share.', id_part='child_name_1')
+
+ with self.argument_context('datashare list-synchronization') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_name', options_list=['--name', '-n', '--share-name'], type=str,
+ help='The name of the share.')
+ c.argument('skip_token', type=str, help='Continuation token')
+ c.argument('filter_', options_list=['--filter'], type=str, help='Filters the results using OData syntax.')
+ c.argument('orderby', type=str, help='Sorts the results using OData syntax.')
+
+ with self.argument_context('datashare list-synchronization-detail') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_name', options_list=['--name', '-n', '--share-name'], type=str,
+ help='The name of the share.')
+ c.argument('skip_token', type=str, help='Continuation token')
+ c.argument('filter_', options_list=['--filter'], type=str, help='Filters the results using OData syntax.')
+ c.argument('orderby', type=str, help='Sorts the results using OData syntax.')
+ c.argument('consumer_email', type=str, help='Email of the user who created the synchronization')
+ c.argument('consumer_name', type=str, help='Name of the user who created the synchronization')
+ c.argument('consumer_tenant_name', type=str,
+ help='Tenant name of the consumer who created the synchronization')
+        c.argument('duration_ms', type=int, help='Synchronization duration in milliseconds')
+        c.argument('end_time', help='End time of synchronization')
+        c.argument('message', type=str, help='Message of synchronization')
+        c.argument('start_time', help='Start time of synchronization')
+ c.argument('status', type=str, help='Raw Status')
+ c.argument('synchronization_id', type=str, help='Synchronization id')
+
+ with self.argument_context('datashare wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', options_list=['--name', '-n', '--share-name'], type=str, help='The name of the share '
+ 'to retrieve.', id_part='child_name_1')
+
+ with self.argument_context('datashare provider-share-subscription list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_name', type=str, help='The name of the share.')
+ c.argument('skip_token', type=str, help='Continuation Token')
+
+ with self.argument_context('datashare provider-share-subscription show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('provider_share_subscription_id', type=str, help='To locate shareSubscription',
+ id_part='child_name_2')
+
+ with self.argument_context('datashare provider-share-subscription adjust') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('provider_share_subscription_id', type=str, help='To locate shareSubscription',
+ id_part='child_name_2')
+ c.argument('expiration_date', help='Expiration date of the share subscription in UTC format')
+
+ with self.argument_context('datashare provider-share-subscription reinstate') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('provider_share_subscription_id', type=str, help='To locate shareSubscription',
+ id_part='child_name_2')
+ c.argument('expiration_date', help='Expiration date of the share subscription in UTC format')
+
+ with self.argument_context('datashare provider-share-subscription revoke') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('provider_share_subscription_id', type=str, help='To locate shareSubscription',
+ id_part='child_name_2')
+
+ with self.argument_context('datashare provider-share-subscription wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('provider_share_subscription_id', type=str, help='To locate shareSubscription',
+ id_part='child_name_2')
+
+ with self.argument_context('datashare share-subscription list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('skip_token', type=str, help='Continuation Token')
+ c.argument('filter_', options_list=['--filter'], type=str, help='Filters the results using OData syntax.')
+ c.argument('orderby', type=str, help='Sorts the results using OData syntax.')
+
+ with self.argument_context('datashare share-subscription show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_subscription_name', options_list=['--name', '-n', '--share-subscription-name'], type=str,
+ help='The name of the shareSubscription.', id_part='child_name_1')
+
+ with self.argument_context('datashare share-subscription create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_subscription_name', options_list=['--name', '-n', '--share-subscription-name'], type=str,
+ help='The name of the shareSubscription.')
+ c.argument('expiration_date', help='The expiration date of the share subscription.')
+ c.argument('invitation_id', type=str, help='The invitation id.')
+ c.argument('source_share_location', type=str, help='Source share location.')
+
+ with self.argument_context('datashare share-subscription delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_subscription_name', options_list=['--name', '-n', '--share-subscription-name'], type=str,
+ help='The name of the shareSubscription.', id_part='child_name_1')
+
+ with self.argument_context('datashare share-subscription cancel-synchronization') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_subscription_name', options_list=['--name', '-n', '--share-subscription-name'], type=str,
+ help='The name of the shareSubscription.', id_part='child_name_1')
+ c.argument('synchronization_id', type=str, help='Synchronization id')
+
+ with self.argument_context('datashare share-subscription list-source-share-synchronization-setting') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_subscription_name', options_list=['--name', '-n', '--share-subscription-name'], type=str,
+ help='The name of the shareSubscription.')
+ c.argument('skip_token', type=str, help='Continuation token')
+
+ with self.argument_context('datashare share-subscription list-synchronization') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_subscription_name', options_list=['--name', '-n', '--share-subscription-name'], type=str,
+ help='The name of the share subscription.')
+ c.argument('skip_token', type=str, help='Continuation token')
+ c.argument('filter_', options_list=['--filter'], type=str, help='Filters the results using OData syntax.')
+ c.argument('orderby', type=str, help='Sorts the results using OData syntax.')
+
+ with self.argument_context('datashare share-subscription list-synchronization-detail') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_subscription_name', options_list=['--name', '-n', '--share-subscription-name'], type=str,
+ help='The name of the share subscription.')
+ c.argument('skip_token', type=str, help='Continuation token')
+ c.argument('filter_', options_list=['--filter'], type=str, help='Filters the results using OData syntax.')
+ c.argument('orderby', type=str, help='Sorts the results using OData syntax.')
+ c.argument('synchronization_id', type=str, help='Synchronization id')
+
+ with self.argument_context('datashare share-subscription synchronize') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_subscription_name', options_list=['--name', '-n', '--share-subscription-name'], type=str,
+                   help='The name of the share subscription.', id_part='child_name_1')
+ c.argument('synchronization_mode', arg_type=get_enum_type(['Incremental', 'FullSync']), help='Mode of '
+ 'synchronization used in triggers and snapshot sync. Incremental by default')
+
+ with self.argument_context('datashare share-subscription wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_subscription_name', options_list=['--name', '-n', '--share-subscription-name'], type=str,
+ help='The name of the shareSubscription.', id_part='child_name_1')
+
+ with self.argument_context('datashare consumer-source-data-set list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_subscription_name', type=str, help='The name of the shareSubscription.')
+ c.argument('skip_token', type=str, help='Continuation token')
+
+ with self.argument_context('datashare synchronization-setting list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_name', type=str, help='The name of the share.')
+ c.argument('skip_token', type=str, help='continuation token')
+
+ with self.argument_context('datashare synchronization-setting show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('synchronization_setting_name', options_list=['--name', '-n', '--synchronization-setting-name'],
+ type=str, help='The name of the synchronizationSetting.', id_part='child_name_2')
+
+ with self.argument_context('datashare synchronization-setting create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_name', type=str, help='The name of the share to add the synchronization setting to.')
+ c.argument('synchronization_setting_name', options_list=['--name', '-n', '--synchronization-setting-name'],
+ type=str, help='The name of the synchronizationSetting.')
+ c.argument('scheduled_synchronization_setting', action=AddScheduledSynchronizationSetting, nargs='+', help='A '
+ 'type of synchronization setting based on schedule', arg_group='SynchronizationSetting')
+
+ with self.argument_context('datashare synchronization-setting delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('synchronization_setting_name', options_list=['--name', '-n', '--synchronization-setting-name'],
+                   type=str, help='The name of the synchronizationSetting.', id_part='child_name_2')
+
+ with self.argument_context('datashare synchronization-setting wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_name', type=str, help='The name of the share.', id_part='child_name_1')
+ c.argument('synchronization_setting_name', options_list=['--name', '-n', '--synchronization-setting-name'],
+ type=str, help='The name of the synchronizationSetting.', id_part='child_name_2')
+
+ with self.argument_context('datashare trigger list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_subscription_name', type=str, help='The name of the share subscription.')
+ c.argument('skip_token', type=str, help='Continuation token')
+
+ with self.argument_context('datashare trigger show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_subscription_name', type=str, help='The name of the shareSubscription.',
+ id_part='child_name_1')
+ c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The name of the '
+ 'trigger.', id_part='child_name_2')
+
+ with self.argument_context('datashare trigger create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.')
+ c.argument('share_subscription_name', type=str, help='The name of the share subscription which will hold the '
+ 'data set sink.')
+ c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The name of the '
+ 'trigger.')
+ c.argument('scheduled_trigger', action=AddScheduledTrigger, nargs='+', help='A type of trigger based on '
+ 'schedule', arg_group='Trigger')
+
+ with self.argument_context('datashare trigger delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_subscription_name', type=str, help='The name of the shareSubscription.',
+ id_part='child_name_1')
+ c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The name of the '
+ 'trigger.', id_part='child_name_2')
+
+ with self.argument_context('datashare trigger wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('account_name', type=str, help='The name of the share account.', id_part='name')
+ c.argument('share_subscription_name', type=str, help='The name of the shareSubscription.',
+ id_part='child_name_1')
+ c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The name of the '
+ 'trigger.', id_part='child_name_2')
diff --git a/src/datashare/azext_datashare/generated/_validators.py b/src/datashare/azext_datashare/generated/_validators.py
new file mode 100644
index 00000000000..b33a44c1ebf
--- /dev/null
+++ b/src/datashare/azext_datashare/generated/_validators.py
@@ -0,0 +1,9 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
diff --git a/src/datashare/azext_datashare/generated/action.py b/src/datashare/azext_datashare/generated/action.py
new file mode 100644
index 00000000000..31f5bf9f0b9
--- /dev/null
+++ b/src/datashare/azext_datashare/generated/action.py
@@ -0,0 +1,864 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=protected-access
+
+import argparse
+from collections import defaultdict
+from knack.util import CLIError
+
+
+class AddAdlsGen1FileDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.adls_gen1_file_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'account-name':
+ d['account_name'] = v[0]
+ elif kl == 'file-name':
+ d['file_name'] = v[0]
+ elif kl == 'folder-path':
+ d['folder_path'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter adls_gen1_file_data_set. All possible '
+ 'keys are: account-name, file-name, folder-path, resource-group, subscription-id'.
+ format(k))
+ d['kind'] = 'AdlsGen1File'
+ return d
+
+
+class AddAdlsGen1FolderDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.adls_gen1_folder_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'account-name':
+ d['account_name'] = v[0]
+ elif kl == 'folder-path':
+ d['folder_path'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter adls_gen1_folder_data_set. All possible '
+ 'keys are: account-name, folder-path, resource-group, subscription-id'.format(k))
+ d['kind'] = 'AdlsGen1Folder'
+ return d
+
+
+class AddAdlsGen2FileDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.adls_gen2_file_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'file-path':
+ d['file_path'] = v[0]
+ elif kl == 'file-system':
+ d['file_system'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter adls_gen2_file_data_set. All possible '
+ 'keys are: file-path, file-system, resource-group, storage-account-name, '
+ 'subscription-id'.format(k))
+ d['kind'] = 'AdlsGen2File'
+ return d
+
+
+class AddAdlsGen2FileSystemDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.adls_gen2_file_system_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'file-system':
+ d['file_system'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter adls_gen2_file_system_data_set. All '
+ 'possible keys are: file-system, resource-group, storage-account-name, subscription-id'.
+ format(k))
+ d['kind'] = 'AdlsGen2FileSystem'
+ return d
+
+
+class AddAdlsGen2FolderDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.adls_gen2_folder_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'file-system':
+ d['file_system'] = v[0]
+ elif kl == 'folder-path':
+ d['folder_path'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter adls_gen2_folder_data_set. All possible '
+ 'keys are: file-system, folder-path, resource-group, storage-account-name, '
+ 'subscription-id'.format(k))
+ d['kind'] = 'AdlsGen2Folder'
+ return d
+
+
+class AddBlobContainerDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.blob_container_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'container-name':
+ d['container_name'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter blob_container_data_set. All possible '
+ 'keys are: container-name, resource-group, storage-account-name, subscription-id'.
+ format(k))
+ d['kind'] = 'Container'
+ return d
+
+
+class AddBlobDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.blob_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'container-name':
+ d['container_name'] = v[0]
+ elif kl == 'file-path':
+ d['file_path'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter blob_data_set. All possible keys are: '
+ 'container-name, file-path, resource-group, storage-account-name, subscription-id'.
+ format(k))
+ d['kind'] = 'Blob'
+ return d
+
+
+class AddBlobFolderDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.blob_folder_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'container-name':
+ d['container_name'] = v[0]
+ elif kl == 'prefix':
+ d['prefix'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter blob_folder_data_set. All possible keys '
+ 'are: container-name, prefix, resource-group, storage-account-name, subscription-id'.
+ format(k))
+ d['kind'] = 'BlobFolder'
+ return d
+
+
+class AddKustoClusterDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.kusto_cluster_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'kusto-cluster-resource-id':
+ d['kusto_cluster_resource_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter kusto_cluster_data_set. All possible keys '
+ 'are: kusto-cluster-resource-id'.format(k))
+ d['kind'] = 'KustoCluster'
+ return d
+
+
+class AddKustoDatabaseDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.kusto_database_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'kusto-database-resource-id':
+ d['kusto_database_resource_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter kusto_database_data_set. All possible '
+ 'keys are: kusto-database-resource-id'.format(k))
+ d['kind'] = 'KustoDatabase'
+ return d
+
+
+class AddSqldbTableDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.sqldb_table_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'database-name':
+ d['database_name'] = v[0]
+ elif kl == 'schema-name':
+ d['schema_name'] = v[0]
+ elif kl == 'sql-server-resource-id':
+ d['sql_server_resource_id'] = v[0]
+ elif kl == 'table-name':
+ d['table_name'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter sqldb_table_data_set. All possible keys '
+ 'are: database-name, schema-name, sql-server-resource-id, table-name'.format(k))
+ d['kind'] = 'SqlDBTable'
+ return d
+
+
+class AddSqldwTableDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.sqldw_table_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'data-warehouse-name':
+ d['data_warehouse_name'] = v[0]
+ elif kl == 'schema-name':
+ d['schema_name'] = v[0]
+ elif kl == 'sql-server-resource-id':
+ d['sql_server_resource_id'] = v[0]
+ elif kl == 'table-name':
+ d['table_name'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter sqldw_table_data_set. All possible keys '
+ 'are: data-warehouse-name, schema-name, sql-server-resource-id, table-name'.format(k))
+ d['kind'] = 'SqlDWTable'
+ return d
+
+
+class AddSynapseWorkspaceSqlPoolTableDataSet(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.synapse_workspace_sql_pool_table_data_set = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'synapse-workspace-sql-pool-table-resource-id':
+ d['synapse_workspace_sql_pool_table_resource_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter synapse_workspace_sql_pool_table_data_set.'
+ ' All possible keys are: synapse-workspace-sql-pool-table-resource-id'.format(k))
+ d['kind'] = 'SynapseWorkspaceSqlPoolTable'
+ return d
+
+
+class AddAdlsGen2FileDataSetMapping(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.adls_gen2_file_data_set_mapping = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'data-set-id':
+ d['data_set_id'] = v[0]
+ elif kl == 'file-path':
+ d['file_path'] = v[0]
+ elif kl == 'file-system':
+ d['file_system'] = v[0]
+ elif kl == 'output-type':
+ d['output_type'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter adls_gen2_file_data_set_mapping. All '
+ 'possible keys are: data-set-id, file-path, file-system, output-type, resource-group, '
+ 'storage-account-name, subscription-id'.format(k))
+ d['kind'] = 'AdlsGen2File'
+ return d
+
+
+class AddAdlsGen2FileSystemDataSetMapping(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.adls_gen2_file_system_data_set_mapping = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'data-set-id':
+ d['data_set_id'] = v[0]
+ elif kl == 'file-system':
+ d['file_system'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter adls_gen2_file_system_data_set_mapping. '
+ 'All possible keys are: data-set-id, file-system, resource-group, storage-account-name, '
+ 'subscription-id'.format(k))
+ d['kind'] = 'AdlsGen2FileSystem'
+ return d
+
+
+class AddAdlsGen2FolderDataSetMapping(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.adls_gen2_folder_data_set_mapping = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'data-set-id':
+ d['data_set_id'] = v[0]
+ elif kl == 'file-system':
+ d['file_system'] = v[0]
+ elif kl == 'folder-path':
+ d['folder_path'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter adls_gen2_folder_data_set_mapping. All '
+ 'possible keys are: data-set-id, file-system, folder-path, resource-group, '
+ 'storage-account-name, subscription-id'.format(k))
+ d['kind'] = 'AdlsGen2Folder'
+ return d
+
+
+class AddBlobContainerDataSetMapping(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.blob_container_data_set_mapping = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'container-name':
+ d['container_name'] = v[0]
+ elif kl == 'data-set-id':
+ d['data_set_id'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter blob_container_data_set_mapping. All '
+ 'possible keys are: container-name, data-set-id, resource-group, storage-account-name, '
+ 'subscription-id'.format(k))
+ d['kind'] = 'Container'
+ return d
+
+
+class AddBlobDataSetMapping(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.blob_data_set_mapping = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'container-name':
+ d['container_name'] = v[0]
+ elif kl == 'data-set-id':
+ d['data_set_id'] = v[0]
+ elif kl == 'file-path':
+ d['file_path'] = v[0]
+ elif kl == 'output-type':
+ d['output_type'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter blob_data_set_mapping. All possible keys '
+ 'are: container-name, data-set-id, file-path, output-type, resource-group, '
+ 'storage-account-name, subscription-id'.format(k))
+ d['kind'] = 'Blob'
+ return d
+
+
+class AddBlobFolderDataSetMapping(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.blob_folder_data_set_mapping = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'container-name':
+ d['container_name'] = v[0]
+ elif kl == 'data-set-id':
+ d['data_set_id'] = v[0]
+ elif kl == 'prefix':
+ d['prefix'] = v[0]
+ elif kl == 'resource-group':
+ d['resource_group'] = v[0]
+ elif kl == 'storage-account-name':
+ d['storage_account_name'] = v[0]
+ elif kl == 'subscription-id':
+ d['subscription_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter blob_folder_data_set_mapping. All '
+ 'possible keys are: container-name, data-set-id, prefix, resource-group, '
+ 'storage-account-name, subscription-id'.format(k))
+ d['kind'] = 'BlobFolder'
+ return d
+
+
+class AddKustoClusterDataSetMapping(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.kusto_cluster_data_set_mapping = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'data-set-id':
+ d['data_set_id'] = v[0]
+ elif kl == 'kusto-cluster-resource-id':
+ d['kusto_cluster_resource_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter kusto_cluster_data_set_mapping. All '
+ 'possible keys are: data-set-id, kusto-cluster-resource-id'.format(k))
+ d['kind'] = 'KustoCluster'
+ return d
+
+
+class AddKustoDatabaseDataSetMapping(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.kusto_database_data_set_mapping = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'data-set-id':
+ d['data_set_id'] = v[0]
+            elif kl == 'kusto-database-resource-id':
+                d['kusto_database_resource_id'] = v[0]
+            else:
+                raise CLIError('Unsupported Key {} is provided for parameter kusto_database_data_set_mapping. All '
+                               'possible keys are: data-set-id, kusto-database-resource-id'.format(k))
+ d['kind'] = 'KustoDatabase'
+ return d
+
+
+class AddSqldbTableDataSetMapping(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.sqldb_table_data_set_mapping = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'database-name':
+ d['database_name'] = v[0]
+ elif kl == 'data-set-id':
+ d['data_set_id'] = v[0]
+ elif kl == 'schema-name':
+ d['schema_name'] = v[0]
+ elif kl == 'sql-server-resource-id':
+ d['sql_server_resource_id'] = v[0]
+ elif kl == 'table-name':
+ d['table_name'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter sqldb_table_data_set_mapping. All '
+ 'possible keys are: database-name, data-set-id, schema-name, sql-server-resource-id, '
+ 'table-name'.format(k))
+ d['kind'] = 'SqlDBTable'
+ return d
+
+
+class AddSqldwTableDataSetMapping(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.sqldw_table_data_set_mapping = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'data-set-id':
+ d['data_set_id'] = v[0]
+ elif kl == 'data-warehouse-name':
+ d['data_warehouse_name'] = v[0]
+ elif kl == 'schema-name':
+ d['schema_name'] = v[0]
+ elif kl == 'sql-server-resource-id':
+ d['sql_server_resource_id'] = v[0]
+ elif kl == 'table-name':
+ d['table_name'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter sqldw_table_data_set_mapping. All '
+ 'possible keys are: data-set-id, data-warehouse-name, schema-name, '
+ 'sql-server-resource-id, table-name'.format(k))
+ d['kind'] = 'SqlDWTable'
+ return d
+
+
+class AddSynapseWorkspaceSqlPoolTableDataSetMapping(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.synapse_workspace_sql_pool_table_data_set_mapping = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'data-set-id':
+ d['data_set_id'] = v[0]
+ elif kl == 'synapse-workspace-sql-pool-table-resource-id':
+ d['synapse_workspace_sql_pool_table_resource_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter synapse_workspace_sql_pool_table_data_set_'
+ 'mapping. All possible keys are: data-set-id, synapse-workspace-sql-pool-table-resource-'
+ 'id'.format(k))
+ d['kind'] = 'SynapseWorkspaceSqlPoolTable'
+ return d
+
+
+class AddScheduledSynchronizationSetting(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.scheduled_synchronization_setting = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'recurrence-interval':
+ d['recurrence_interval'] = v[0]
+ elif kl == 'synchronization-time':
+ d['synchronization_time'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter scheduled_synchronization_setting. All '
+ 'possible keys are: recurrence-interval, synchronization-time'.format(k))
+ d['kind'] = 'ScheduleBased'
+ return d
+
+
+class AddScheduledTrigger(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.scheduled_trigger = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'recurrence-interval':
+ d['recurrence_interval'] = v[0]
+ elif kl == 'synchronization-mode':
+ d['synchronization_mode'] = v[0]
+ elif kl == 'synchronization-time':
+ d['synchronization_time'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter scheduled_trigger. All possible keys are: '
+ 'recurrence-interval, synchronization-mode, synchronization-time'.format(k))
+ d['kind'] = 'ScheduleBased'
+ return d
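+
+
+# Illustrative sketch (assumed example values): AddScheduledTrigger builds the
+# ScheduleBased trigger payload, e.g. the tokens
+#   recurrence-interval=Day synchronization-mode=Incremental synchronization-time=2020-04-05T10:50:00Z
+# are parsed into
+#   {'recurrence_interval': 'Day', 'synchronization_mode': 'Incremental',
+#    'synchronization_time': '2020-04-05T10:50:00Z', 'kind': 'ScheduleBased'}
+# AddScheduledSynchronizationSetting behaves the same way, minus synchronization-mode.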
diff --git a/src/datashare/azext_datashare/generated/commands.py b/src/datashare/azext_datashare/generated/commands.py
new file mode 100644
index 00000000000..01d38a37790
--- /dev/null
+++ b/src/datashare/azext_datashare/generated/commands.py
@@ -0,0 +1,155 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-statements
+# pylint: disable=too-many-locals
+
+from azure.cli.core.commands import CliCommandType
+
+
+def load_command_table(self, _):
+
+ from azext_datashare.generated._client_factory import cf_account
+ datashare_account = CliCommandType(
+ operations_tmpl='azext_datashare.vendored_sdks.datashare.operations._accounts_operations#AccountsOperations.{}',
+ client_factory=cf_account)
+ with self.command_group('datashare account', datashare_account, client_factory=cf_account) as g:
+ g.custom_command('list', 'datashare_account_list')
+ g.custom_show_command('show', 'datashare_account_show')
+ g.custom_command('create', 'datashare_account_create', supports_no_wait=True)
+ g.custom_command('update', 'datashare_account_update')
+ g.custom_command('delete', 'datashare_account_delete', supports_no_wait=True, confirmation=True)
+ g.custom_wait_command('wait', 'datashare_account_show')
+
+ from azext_datashare.generated._client_factory import cf_consumer_invitation
+ datashare_consumer_invitation = CliCommandType(
+ operations_tmpl='azext_datashare.vendored_sdks.datashare.operations._consumer_invitations_operations#ConsumerIn'
+ 'vitationsOperations.{}',
+ client_factory=cf_consumer_invitation)
+ with self.command_group('datashare consumer-invitation', datashare_consumer_invitation,
+ client_factory=cf_consumer_invitation) as g:
+ g.custom_show_command('show', 'datashare_consumer_invitation_show')
+ g.custom_command('list-invitation', 'datashare_consumer_invitation_list_invitation')
+ g.custom_command('reject-invitation', 'datashare_consumer_invitation_reject_invitation')
+
+ from azext_datashare.generated._client_factory import cf_data_set
+ datashare_data_set = CliCommandType(
+ operations_tmpl='azext_datashare.vendored_sdks.datashare.operations._data_sets_operations#DataSetsOperations.{}'
+ '',
+ client_factory=cf_data_set)
+ with self.command_group('datashare data-set', datashare_data_set, client_factory=cf_data_set) as g:
+ g.custom_command('list', 'datashare_data_set_list')
+ g.custom_show_command('show', 'datashare_data_set_show')
+ g.custom_command('create', 'datashare_data_set_create')
+ g.custom_command('delete', 'datashare_data_set_delete', supports_no_wait=True, confirmation=True)
+ g.custom_wait_command('wait', 'datashare_data_set_show')
+
+ from azext_datashare.generated._client_factory import cf_data_set_mapping
+ datashare_data_set_mapping = CliCommandType(
+ operations_tmpl='azext_datashare.vendored_sdks.datashare.operations._data_set_mappings_operations#DataSetMappin'
+ 'gsOperations.{}',
+ client_factory=cf_data_set_mapping)
+ with self.command_group('datashare data-set-mapping', datashare_data_set_mapping,
+ client_factory=cf_data_set_mapping) as g:
+ g.custom_command('list', 'datashare_data_set_mapping_list')
+ g.custom_show_command('show', 'datashare_data_set_mapping_show')
+ g.custom_command('create', 'datashare_data_set_mapping_create')
+ g.custom_command('delete', 'datashare_data_set_mapping_delete', confirmation=True)
+
+ from azext_datashare.generated._client_factory import cf_invitation
+ datashare_invitation = CliCommandType(
+ operations_tmpl='azext_datashare.vendored_sdks.datashare.operations._invitations_operations#InvitationsOperatio'
+ 'ns.{}',
+ client_factory=cf_invitation)
+ with self.command_group('datashare invitation', datashare_invitation, client_factory=cf_invitation) as g:
+ g.custom_command('list', 'datashare_invitation_list')
+ g.custom_show_command('show', 'datashare_invitation_show')
+ g.custom_command('create', 'datashare_invitation_create')
+ g.custom_command('delete', 'datashare_invitation_delete', confirmation=True)
+
+ from azext_datashare.generated._client_factory import cf_share
+ datashare_share = CliCommandType(
+ operations_tmpl='azext_datashare.vendored_sdks.datashare.operations._shares_operations#SharesOperations.{}',
+ client_factory=cf_share)
+ with self.command_group('datashare', datashare_share, client_factory=cf_share, is_experimental=True) as g:
+ g.custom_command('list', 'datashare_list')
+ g.custom_show_command('show', 'datashare_show')
+ g.custom_command('create', 'datashare_create')
+ g.custom_command('delete', 'datashare_delete', supports_no_wait=True, confirmation=True)
+ g.custom_command('list-synchronization', 'datashare_list_synchronization')
+ g.custom_command('list-synchronization-detail', 'datashare_list_synchronization_detail')
+ g.custom_wait_command('wait', 'datashare_show')
+
+ from azext_datashare.generated._client_factory import cf_provider_share_subscription
+ datashare_provider_share_subscription = CliCommandType(
+ operations_tmpl='azext_datashare.vendored_sdks.datashare.operations._provider_share_subscriptions_operations#Pr'
+ 'oviderShareSubscriptionsOperations.{}',
+ client_factory=cf_provider_share_subscription)
+ with self.command_group('datashare provider-share-subscription', datashare_provider_share_subscription,
+ client_factory=cf_provider_share_subscription) as g:
+ g.custom_command('list', 'datashare_provider_share_subscription_list')
+ g.custom_show_command('show', 'datashare_provider_share_subscription_show')
+ g.custom_command('adjust', 'datashare_provider_share_subscription_adjust')
+ g.custom_command('reinstate', 'datashare_provider_share_subscription_reinstate')
+ g.custom_command('revoke', 'datashare_provider_share_subscription_revoke', supports_no_wait=True)
+ g.custom_wait_command('wait', 'datashare_provider_share_subscription_show')
+
+ from azext_datashare.generated._client_factory import cf_share_subscription
+ datashare_share_subscription = CliCommandType(
+ operations_tmpl='azext_datashare.vendored_sdks.datashare.operations._share_subscriptions_operations#ShareSubscr'
+ 'iptionsOperations.{}',
+ client_factory=cf_share_subscription)
+ with self.command_group('datashare share-subscription', datashare_share_subscription,
+ client_factory=cf_share_subscription) as g:
+ g.custom_command('list', 'datashare_share_subscription_list')
+ g.custom_show_command('show', 'datashare_share_subscription_show')
+ g.custom_command('create', 'datashare_share_subscription_create')
+ g.custom_command('delete', 'datashare_share_subscription_delete', supports_no_wait=True, confirmation=True)
+ g.custom_command('cancel-synchronization', 'datashare_share_subscription_cancel_synchronization',
+ supports_no_wait=True)
+ g.custom_command('list-source-share-synchronization-setting', 'datashare_share_subscription_list_source_share_s'
+ 'ynchronization_setting')
+ g.custom_command('list-synchronization', 'datashare_share_subscription_list_synchronization')
+ g.custom_command('list-synchronization-detail', 'datashare_share_subscription_list_synchronization_detail')
+ g.custom_command('synchronize', 'datashare_share_subscription_synchronize', supports_no_wait=True)
+ g.custom_wait_command('wait', 'datashare_share_subscription_show')
+
+ from azext_datashare.generated._client_factory import cf_consumer_source_data_set
+ datashare_consumer_source_data_set = CliCommandType(
+ operations_tmpl='azext_datashare.vendored_sdks.datashare.operations._consumer_source_data_sets_operations#Consu'
+ 'merSourceDataSetsOperations.{}',
+ client_factory=cf_consumer_source_data_set)
+ with self.command_group('datashare consumer-source-data-set', datashare_consumer_source_data_set,
+ client_factory=cf_consumer_source_data_set) as g:
+ g.custom_command('list', 'datashare_consumer_source_data_set_list')
+
+ from azext_datashare.generated._client_factory import cf_synchronization_setting
+ datashare_synchronization_setting = CliCommandType(
+ operations_tmpl='azext_datashare.vendored_sdks.datashare.operations._synchronization_settings_operations#Synchr'
+ 'onizationSettingsOperations.{}',
+ client_factory=cf_synchronization_setting)
+ with self.command_group('datashare synchronization-setting', datashare_synchronization_setting,
+ client_factory=cf_synchronization_setting) as g:
+ g.custom_command('list', 'datashare_synchronization_setting_list')
+ g.custom_show_command('show', 'datashare_synchronization_setting_show')
+ g.custom_command('create', 'datashare_synchronization_setting_create')
+ g.custom_command('delete', 'datashare_synchronization_setting_delete', supports_no_wait=True,
+ confirmation=True)
+ g.custom_wait_command('wait', 'datashare_synchronization_setting_show')
+
+ from azext_datashare.generated._client_factory import cf_trigger
+ datashare_trigger = CliCommandType(
+ operations_tmpl='azext_datashare.vendored_sdks.datashare.operations._triggers_operations#TriggersOperations.{}',
+ client_factory=cf_trigger)
+ with self.command_group('datashare trigger', datashare_trigger, client_factory=cf_trigger) as g:
+ g.custom_command('list', 'datashare_trigger_list')
+ g.custom_show_command('show', 'datashare_trigger_show')
+ g.custom_command('create', 'datashare_trigger_create', supports_no_wait=True)
+ g.custom_command('delete', 'datashare_trigger_delete', supports_no_wait=True, confirmation=True)
+ g.custom_wait_command('wait', 'datashare_trigger_show')
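+
+
+# Illustrative note (not generated): each custom_command above binds a CLI verb to one of
+# the datashare_* functions in custom.py, so the 'datashare account' group maps 'create'
+# to datashare_account_create, which is what backs an invocation such as
+#   az datashare account create --location "West US 2" --name MyAccount --resource-group MyRG
+# (mirroring the recorded example in tests/latest/example_steps.py; the concrete values
+# are placeholders).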
diff --git a/src/datashare/azext_datashare/generated/custom.py b/src/datashare/azext_datashare/generated/custom.py
new file mode 100644
index 00000000000..5967d71dbff
--- /dev/null
+++ b/src/datashare/azext_datashare/generated/custom.py
@@ -0,0 +1,772 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+
+from knack.util import CLIError
+from azure.cli.core.util import sdk_no_wait
+
+
+def datashare_account_list(client,
+ resource_group_name=None,
+ skip_token=None):
+ if resource_group_name:
+ return client.list_by_resource_group(resource_group_name=resource_group_name,
+ skip_token=skip_token)
+ return client.list_by_subscription(skip_token=skip_token)
+
+
+def datashare_account_show(client,
+ resource_group_name,
+ account_name):
+ return client.get(resource_group_name=resource_group_name,
+ account_name=account_name)
+
+
+def datashare_account_create(client,
+ resource_group_name,
+ account_name,
+ location=None,
+ tags=None,
+ no_wait=False):
+ account = {}
+ account['location'] = location
+ account['tags'] = tags
+ account['identity'] = {}
+ account['identity']['type'] = "SystemAssigned"
+ return sdk_no_wait(no_wait,
+ client.begin_create,
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ account=account)
+
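+# Minimal sketch of the payload built above (illustrative values): with
+# location='West US 2' and tags={'tag1': 'Red'}, client.begin_create receives
+#   account={'location': 'West US 2', 'tags': {'tag1': 'Red'},
+#            'identity': {'type': 'SystemAssigned'}}
+# i.e. the managed identity type is always SystemAssigned and cannot be overridden here.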
+
+def datashare_account_update(client,
+ resource_group_name,
+ account_name,
+ tags=None):
+ account_update_parameters = {}
+ account_update_parameters['tags'] = tags
+ return client.update(resource_group_name=resource_group_name,
+ account_name=account_name,
+ account_update_parameters=account_update_parameters)
+
+
+def datashare_account_delete(client,
+ resource_group_name,
+ account_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ account_name=account_name)
+
+
+def datashare_consumer_invitation_show(client,
+ location,
+ invitation_id):
+ return client.get(location=location,
+ invitation_id=invitation_id)
+
+
+def datashare_consumer_invitation_list_invitation(client,
+ skip_token=None):
+ return client.list_invitations(skip_token=skip_token)
+
+
+def datashare_consumer_invitation_reject_invitation(client,
+ location,
+ invitation_id):
+ invitation = {}
+ invitation['invitation_id'] = invitation_id
+ return client.reject_invitation(location=location,
+ invitation=invitation)
+
+
+def datashare_data_set_list(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ skip_token=None,
+ filter_=None,
+ orderby=None):
+ return client.list_by_share(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ skip_token=skip_token,
+ filter=filter_,
+ orderby=orderby)
+
+
+def datashare_data_set_show(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ data_set_name):
+ return client.get(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ data_set_name=data_set_name)
+
+
+def datashare_data_set_create(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ data_set_name,
+ adls_gen1_file_data_set=None,
+ adls_gen1_folder_data_set=None,
+ adls_gen2_file_data_set=None,
+ adls_gen2_file_system_data_set=None,
+ adls_gen2_folder_data_set=None,
+ blob_container_data_set=None,
+ blob_data_set=None,
+ blob_folder_data_set=None,
+ kusto_cluster_data_set=None,
+ kusto_database_data_set=None,
+ sqldb_table_data_set=None,
+ sqldw_table_data_set=None,
+ synapse_workspace_sql_pool_table_data_set=None):
+ all_data_set = []
+ if adls_gen1_file_data_set is not None:
+ all_data_set.append(adls_gen1_file_data_set)
+ if adls_gen1_folder_data_set is not None:
+ all_data_set.append(adls_gen1_folder_data_set)
+ if adls_gen2_file_data_set is not None:
+ all_data_set.append(adls_gen2_file_data_set)
+ if adls_gen2_file_system_data_set is not None:
+ all_data_set.append(adls_gen2_file_system_data_set)
+ if adls_gen2_folder_data_set is not None:
+ all_data_set.append(adls_gen2_folder_data_set)
+ if blob_container_data_set is not None:
+ all_data_set.append(blob_container_data_set)
+ if blob_data_set is not None:
+ all_data_set.append(blob_data_set)
+ if blob_folder_data_set is not None:
+ all_data_set.append(blob_folder_data_set)
+ if kusto_cluster_data_set is not None:
+ all_data_set.append(kusto_cluster_data_set)
+ if kusto_database_data_set is not None:
+ all_data_set.append(kusto_database_data_set)
+ if sqldb_table_data_set is not None:
+ all_data_set.append(sqldb_table_data_set)
+ if sqldw_table_data_set is not None:
+ all_data_set.append(sqldw_table_data_set)
+ if synapse_workspace_sql_pool_table_data_set is not None:
+ all_data_set.append(synapse_workspace_sql_pool_table_data_set)
+ if len(all_data_set) > 1:
+ raise CLIError('at most one of adls_gen1_file_data_set, adls_gen1_folder_data_set, adls_gen2_file_data_set, '
+ 'adls_gen2_file_system_data_set, adls_gen2_folder_data_set, blob_container_data_set, '
+ 'blob_data_set, blob_folder_data_set, kusto_cluster_data_set, kusto_database_data_set, '
+ 'sqldb_table_data_set, sqldw_table_data_set, synapse_workspace_sql_pool_table_data_set is '
+ 'needed for data_set!')
+ if len(all_data_set) != 1:
+        raise CLIError('data_set is required, but none of adls_gen1_file_data_set, adls_gen1_folder_data_set, '
+ 'adls_gen2_file_data_set, adls_gen2_file_system_data_set, adls_gen2_folder_data_set, '
+ 'blob_container_data_set, blob_data_set, blob_folder_data_set, kusto_cluster_data_set, '
+ 'kusto_database_data_set, sqldb_table_data_set, sqldw_table_data_set, '
+ 'synapse_workspace_sql_pool_table_data_set is provided!')
+ data_set = all_data_set[0] if len(all_data_set) == 1 else None
+ return client.create(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ data_set_name=data_set_name,
+ data_set=data_set)
+
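+# Illustrative note: the checks above enforce that exactly one --*-data-set argument was
+# supplied; the chosen dict already carries its polymorphic 'kind' (set by the argparse
+# actions, e.g. 'KustoCluster' or 'SqlDBTable') and is forwarded unchanged as data_set.
+# For example (hypothetical value), passing only
+#   kusto_cluster_data_set={'kusto_cluster_resource_id': '<cluster-id>', 'kind': 'KustoCluster'}
+# results in client.create(..., data_set=kusto_cluster_data_set).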
+
+def datashare_data_set_delete(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ data_set_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ data_set_name=data_set_name)
+
+
+def datashare_data_set_mapping_list(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ skip_token=None,
+ filter_=None,
+ orderby=None):
+ return client.list_by_share_subscription(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ skip_token=skip_token,
+ filter=filter_,
+ orderby=orderby)
+
+
+def datashare_data_set_mapping_show(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ data_set_mapping_name):
+ return client.get(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ data_set_mapping_name=data_set_mapping_name)
+
+
+def datashare_data_set_mapping_create(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ data_set_mapping_name,
+ adls_gen2_file_data_set_mapping=None,
+ adls_gen2_file_system_data_set_mapping=None,
+ adls_gen2_folder_data_set_mapping=None,
+ blob_container_data_set_mapping=None,
+ blob_data_set_mapping=None,
+ blob_folder_data_set_mapping=None,
+ kusto_cluster_data_set_mapping=None,
+ kusto_database_data_set_mapping=None,
+ sqldb_table_data_set_mapping=None,
+ sqldw_table_data_set_mapping=None,
+ synapse_workspace_sql_pool_table_data_set_mapping=None):
+ all_data_set_mapping = []
+ if adls_gen2_file_data_set_mapping is not None:
+ all_data_set_mapping.append(adls_gen2_file_data_set_mapping)
+ if adls_gen2_file_system_data_set_mapping is not None:
+ all_data_set_mapping.append(adls_gen2_file_system_data_set_mapping)
+ if adls_gen2_folder_data_set_mapping is not None:
+ all_data_set_mapping.append(adls_gen2_folder_data_set_mapping)
+ if blob_container_data_set_mapping is not None:
+ all_data_set_mapping.append(blob_container_data_set_mapping)
+ if blob_data_set_mapping is not None:
+ all_data_set_mapping.append(blob_data_set_mapping)
+ if blob_folder_data_set_mapping is not None:
+ all_data_set_mapping.append(blob_folder_data_set_mapping)
+ if kusto_cluster_data_set_mapping is not None:
+ all_data_set_mapping.append(kusto_cluster_data_set_mapping)
+ if kusto_database_data_set_mapping is not None:
+ all_data_set_mapping.append(kusto_database_data_set_mapping)
+ if sqldb_table_data_set_mapping is not None:
+ all_data_set_mapping.append(sqldb_table_data_set_mapping)
+ if sqldw_table_data_set_mapping is not None:
+ all_data_set_mapping.append(sqldw_table_data_set_mapping)
+ if synapse_workspace_sql_pool_table_data_set_mapping is not None:
+ all_data_set_mapping.append(synapse_workspace_sql_pool_table_data_set_mapping)
+ if len(all_data_set_mapping) > 1:
+ raise CLIError('at most one of adls_gen2_file_data_set_mapping, adls_gen2_file_system_data_set_mapping, '
+ 'adls_gen2_folder_data_set_mapping, blob_container_data_set_mapping, blob_data_set_mapping, '
+ 'blob_folder_data_set_mapping, kusto_cluster_data_set_mapping, kusto_database_data_set_mapping, '
+ 'sqldb_table_data_set_mapping, sqldw_table_data_set_mapping, synapse_workspace_sql_pool_table_da'
+ 'ta_set_mapping is needed for data_set_mapping!')
+ if len(all_data_set_mapping) != 1:
+        raise CLIError('data_set_mapping is required, but none of adls_gen2_file_data_set_mapping, '
+ 'adls_gen2_file_system_data_set_mapping, adls_gen2_folder_data_set_mapping, '
+ 'blob_container_data_set_mapping, blob_data_set_mapping, blob_folder_data_set_mapping, '
+ 'kusto_cluster_data_set_mapping, kusto_database_data_set_mapping, sqldb_table_data_set_mapping, '
+ 'sqldw_table_data_set_mapping, synapse_workspace_sql_pool_table_data_set_mapping is provided!')
+ data_set_mapping = all_data_set_mapping[0] if len(all_data_set_mapping) == 1 else None
+ return client.create(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ data_set_mapping_name=data_set_mapping_name,
+ data_set_mapping=data_set_mapping)
+
+
+def datashare_data_set_mapping_delete(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ data_set_mapping_name):
+ return client.delete(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ data_set_mapping_name=data_set_mapping_name)
+
+
+def datashare_invitation_list(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ skip_token=None,
+ filter_=None,
+ orderby=None):
+ return client.list_by_share(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ skip_token=skip_token,
+ filter=filter_,
+ orderby=orderby)
+
+
+def datashare_invitation_show(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ invitation_name):
+ return client.get(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ invitation_name=invitation_name)
+
+
+def datashare_invitation_create(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ invitation_name,
+ expiration_date=None,
+ target_active_directory_id=None,
+ target_email=None,
+ target_object_id=None):
+ invitation = {}
+ invitation['expiration_date'] = expiration_date
+ invitation['target_active_directory_id'] = target_active_directory_id
+ invitation['target_email'] = target_email
+ invitation['target_object_id'] = target_object_id
+ return client.create(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ invitation_name=invitation_name,
+ invitation=invitation)
+
+
+def datashare_invitation_delete(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ invitation_name):
+ return client.delete(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ invitation_name=invitation_name)
+
+
+def datashare_list(client,
+ resource_group_name,
+ account_name,
+ skip_token=None,
+ filter_=None,
+ orderby=None):
+ return client.list_by_account(resource_group_name=resource_group_name,
+ account_name=account_name,
+ skip_token=skip_token,
+ filter=filter_,
+ orderby=orderby)
+
+
+def datashare_show(client,
+ resource_group_name,
+ account_name,
+ share_name):
+ return client.get(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name)
+
+
+def datashare_create(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ description=None,
+ share_kind=None,
+ terms=None):
+ share = {}
+ share['description'] = description
+ share['share_kind'] = share_kind
+ share['terms'] = terms
+ return client.create(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ share=share)
+
+
+def datashare_delete(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name)
+
+
+def datashare_list_synchronization(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ skip_token=None,
+ filter_=None,
+ orderby=None):
+ return client.list_synchronizations(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ skip_token=skip_token,
+ filter=filter_,
+ orderby=orderby)
+
+
+def datashare_list_synchronization_detail(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ skip_token=None,
+ filter_=None,
+ orderby=None,
+ consumer_email=None,
+ consumer_name=None,
+ consumer_tenant_name=None,
+ duration_ms=None,
+ end_time=None,
+ message=None,
+ start_time=None,
+ status=None,
+ synchronization_id=None):
+ share_synchronization = {}
+ share_synchronization['consumer_email'] = consumer_email
+ share_synchronization['consumer_name'] = consumer_name
+ share_synchronization['consumer_tenant_name'] = consumer_tenant_name
+ share_synchronization['duration_ms'] = duration_ms
+ share_synchronization['end_time'] = end_time
+ share_synchronization['message'] = message
+ share_synchronization['start_time'] = start_time
+ share_synchronization['status'] = status
+ share_synchronization['synchronization_id'] = synchronization_id
+ return client.list_synchronization_details(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ skip_token=skip_token,
+ filter=filter_,
+ orderby=orderby,
+ share_synchronization=share_synchronization)
+
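+# Note (illustrative): every optional field above is copied into share_synchronization
+# even when it is None; in practice the recorded example only supplies
+# --synchronization-id (see Shares_ListSynchronizationDetails in
+# tests/latest/example_steps.py).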
+
+def datashare_provider_share_subscription_list(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ skip_token=None):
+ return client.list_by_share(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ skip_token=skip_token)
+
+
+def datashare_provider_share_subscription_show(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ provider_share_subscription_id):
+ return client.get_by_share(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ provider_share_subscription_id=provider_share_subscription_id)
+
+
+def datashare_provider_share_subscription_adjust(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ provider_share_subscription_id,
+ expiration_date=None):
+ provider_share_subscription = {}
+ provider_share_subscription['expiration_date'] = expiration_date
+ return client.adjust(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ provider_share_subscription_id=provider_share_subscription_id,
+ provider_share_subscription=provider_share_subscription)
+
+
+def datashare_provider_share_subscription_reinstate(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ provider_share_subscription_id,
+ expiration_date=None):
+ provider_share_subscription = {}
+ provider_share_subscription['expiration_date'] = expiration_date
+ return client.reinstate(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ provider_share_subscription_id=provider_share_subscription_id,
+ provider_share_subscription=provider_share_subscription)
+
+
+def datashare_provider_share_subscription_revoke(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ provider_share_subscription_id,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_revoke,
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ provider_share_subscription_id=provider_share_subscription_id)
+
+
+def datashare_share_subscription_list(client,
+ resource_group_name,
+ account_name,
+ skip_token=None,
+ filter_=None,
+ orderby=None):
+ return client.list_by_account(resource_group_name=resource_group_name,
+ account_name=account_name,
+ skip_token=skip_token,
+ filter=filter_,
+ orderby=orderby)
+
+
+def datashare_share_subscription_show(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name):
+ return client.get(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name)
+
+
+def datashare_share_subscription_create(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ invitation_id,
+ source_share_location,
+ expiration_date=None):
+ share_subscription = {}
+ share_subscription['expiration_date'] = expiration_date
+ share_subscription['invitation_id'] = invitation_id
+ share_subscription['source_share_location'] = source_share_location
+ return client.create(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ share_subscription=share_subscription)
+
+
+def datashare_share_subscription_delete(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name)
+
+
+def datashare_share_subscription_cancel_synchronization(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ synchronization_id,
+ no_wait=False):
+ share_subscription_synchronization = {}
+ share_subscription_synchronization['synchronization_id'] = synchronization_id
+ return sdk_no_wait(no_wait,
+ client.begin_cancel_synchronization,
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ share_subscription_synchronization=share_subscription_synchronization)
+
+
+def datashare_share_subscription_list_source_share_synchronization_setting(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ skip_token=None):
+ return client.list_source_share_synchronization_settings(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ skip_token=skip_token)
+
+
+def datashare_share_subscription_list_synchronization(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ skip_token=None,
+ filter_=None,
+ orderby=None):
+ return client.list_synchronizations(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ skip_token=skip_token,
+ filter=filter_,
+ orderby=orderby)
+
+
+def datashare_share_subscription_list_synchronization_detail(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ synchronization_id,
+ skip_token=None,
+ filter_=None,
+ orderby=None):
+ share_subscription_synchronization = {}
+ share_subscription_synchronization['synchronization_id'] = synchronization_id
+ return client.list_synchronization_details(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ skip_token=skip_token,
+ filter=filter_,
+ orderby=orderby,
+ share_subscription_synchronization=share_subscription_synchronization)
+
+
+def datashare_share_subscription_synchronize(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ synchronization_mode=None,
+ no_wait=False):
+ synchronize = {}
+ synchronize['synchronization_mode'] = synchronization_mode
+ return sdk_no_wait(no_wait,
+ client.begin_synchronize,
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ synchronize=synchronize)
+
+
+def datashare_consumer_source_data_set_list(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ skip_token=None):
+ return client.list_by_share_subscription(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ skip_token=skip_token)
+
+
+def datashare_synchronization_setting_list(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ skip_token=None):
+ return client.list_by_share(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ skip_token=skip_token)
+
+
+def datashare_synchronization_setting_show(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ synchronization_setting_name):
+ return client.get(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ synchronization_setting_name=synchronization_setting_name)
+
+
+def datashare_synchronization_setting_create(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ synchronization_setting_name,
+ scheduled_synchronization_setting=None):
+ all_synchronization_setting = []
+ if scheduled_synchronization_setting is not None:
+ all_synchronization_setting.append(scheduled_synchronization_setting)
+ if len(all_synchronization_setting) > 1:
+ raise CLIError('at most one of scheduled_synchronization_setting is needed for synchronization_setting!')
+ if len(all_synchronization_setting) != 1:
+        raise CLIError('synchronization_setting is required, but none of scheduled_synchronization_setting is '
+ 'provided!')
+ synchronization_setting = all_synchronization_setting[0] if len(all_synchronization_setting) == 1 else None
+ return client.create(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ synchronization_setting_name=synchronization_setting_name,
+ synchronization_setting=synchronization_setting)
+
+
+def datashare_synchronization_setting_delete(client,
+ resource_group_name,
+ account_name,
+ share_name,
+ synchronization_setting_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ synchronization_setting_name=synchronization_setting_name)
+
+
+def datashare_trigger_list(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ skip_token=None):
+ return client.list_by_share_subscription(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ skip_token=skip_token)
+
+
+def datashare_trigger_show(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ trigger_name):
+ return client.get(resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ trigger_name=trigger_name)
+
+
+def datashare_trigger_create(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ trigger_name,
+ scheduled_trigger=None,
+ no_wait=False):
+ all_trigger = []
+ if scheduled_trigger is not None:
+ all_trigger.append(scheduled_trigger)
+ if len(all_trigger) > 1:
+ raise CLIError('at most one of scheduled_trigger is needed for trigger!')
+ if len(all_trigger) != 1:
+        raise CLIError('trigger is required, but none of scheduled_trigger is provided!')
+ trigger = all_trigger[0] if len(all_trigger) == 1 else None
+ return sdk_no_wait(no_wait,
+ client.begin_create,
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ trigger_name=trigger_name,
+ trigger=trigger)
+
+
+def datashare_trigger_delete(client,
+ resource_group_name,
+ account_name,
+ share_subscription_name,
+ trigger_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ trigger_name=trigger_name)
diff --git a/src/datashare/azext_datashare/tests/__init__.py b/src/datashare/azext_datashare/tests/__init__.py
index c9cfdc73e77..70488e93851 100644
--- a/src/datashare/azext_datashare/tests/__init__.py
+++ b/src/datashare/azext_datashare/tests/__init__.py
@@ -8,5 +8,109 @@
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
+import inspect
+import logging
+import os
+import sys
+import traceback
+import datetime as dt
+from azure.core.exceptions import AzureError
+from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError
+
+
+logger = logging.getLogger('azure.cli.testsdk')
+logger.addHandler(logging.StreamHandler())
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
+exceptions = []
+test_map = dict()
+SUCCEEDED = "succeeded"
+FAILED = "failed"
+
+
+def try_manual(func):
+ def import_manual_function(origin_func):
+ from importlib import import_module
+ decorated_path = inspect.getfile(origin_func).lower()
+ module_path = __path__[0].lower()
+ if not decorated_path.startswith(module_path):
+ raise Exception("Decorator can only be used in submodules!")
+ manual_path = os.path.join(
+ decorated_path[module_path.rfind(os.path.sep) + 1:])
+ manual_file_path, manual_file_name = os.path.split(manual_path)
+ module_name, _ = os.path.splitext(manual_file_name)
+ manual_module = "..manual." + \
+ ".".join(manual_file_path.split(os.path.sep) + [module_name, ])
+ return getattr(import_module(manual_module, package=__name__), origin_func.__name__)
+
+ def get_func_to_call():
+ func_to_call = func
+ try:
+ func_to_call = import_manual_function(func)
+ logger.info("Found manual override for %s(...)", func.__name__)
+ except (ImportError, AttributeError):
+ pass
+ return func_to_call
+
+ def wrapper(*args, **kwargs):
+ func_to_call = get_func_to_call()
+ logger.info("running %s()...", func.__name__)
+ try:
+ test_map[func.__name__] = dict()
+ test_map[func.__name__]["result"] = SUCCESSED
+ test_map[func.__name__]["error_message"] = ""
+ test_map[func.__name__]["error_stack"] = ""
+ test_map[func.__name__]["error_normalized"] = ""
+ test_map[func.__name__]["start_dt"] = dt.datetime.utcnow()
+ ret = func_to_call(*args, **kwargs)
+ except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit,
+ JMESPathCheckAssertionError) as e:
+ use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE")
+ if use_exception_cache is None or use_exception_cache.lower() != "true":
+ raise
+ test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+ test_map[func.__name__]["result"] = FAILED
+ test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500]
+ test_map[func.__name__]["error_stack"] = traceback.format_exc().replace(
+ "\r\n", " ").replace("\n", " ")[:500]
+ logger.info("--------------------------------------")
+ logger.info("step exception: %s", e)
+ logger.error("--------------------------------------")
+ logger.error("step exception in %s: %s", func.__name__, e)
+ logger.info(traceback.format_exc())
+ exceptions.append((func.__name__, sys.exc_info()))
+ else:
+ test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+ return ret
+
+ if inspect.isclass(func):
+ return get_func_to_call()
+ return wrapper
+
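+# Usage sketch (illustrative): decorating a generated step such as
+#   @try_manual
+#   def step_account_create(test, rg, checks=None): ...
+# makes the wrapper first look for a function of the same name under the extension's
+# 'manual' package (mirroring this module's relative path); if one exists it is called
+# instead of the generated body. Either way the outcome, timing and any error text are
+# recorded in test_map, which calc_coverage() later dumps as a markdown table.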
+
+def calc_coverage(filename):
+ filename = filename.split(".")[0]
+ coverage_name = filename + "_coverage.md"
+ with open(coverage_name, "w") as f:
+ f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n")
+ total = len(test_map)
+ covered = 0
+ for k, v in test_map.items():
+ if not k.startswith("step_"):
+ total -= 1
+ continue
+ if v["result"] == SUCCESSED:
+ covered += 1
+ f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|"
+ "{end_dt}|\n".format(step_name=k, **v))
+ f.write("Coverage: {}/{}\n".format(covered, total))
+ print("Create coverage\n", file=sys.stderr)
+
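+# Example of the generated <test>_coverage.md content (values are illustrative):
+#   |Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|
+#   |step_account_create|succeeded||||2020-04-05 10:50:00|2020-04-05 10:50:05|
+#   Coverage: 1/1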
+
+def raise_if():
+ if exceptions:
+ if len(exceptions) <= 1:
+ raise exceptions[0][1][1]
+ message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1]))
+ message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]])
+ raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2])
diff --git a/src/datashare/azext_datashare/tests/latest/__init__.py b/src/datashare/azext_datashare/tests/latest/__init__.py
index ee0c4f36bd0..c9cfdc73e77 100644
--- a/src/datashare/azext_datashare/tests/latest/__init__.py
+++ b/src/datashare/azext_datashare/tests/latest/__init__.py
@@ -1,12 +1,12 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/datashare/azext_datashare/tests/latest/example_steps.py b/src/datashare/azext_datashare/tests/latest/example_steps.py
new file mode 100644
index 00000000000..9ec45fca629
--- /dev/null
+++ b/src/datashare/azext_datashare/tests/latest/example_steps.py
@@ -0,0 +1,788 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+from .. import try_manual
+
+
+# EXAMPLE: /Accounts/put/Accounts_Create
+@try_manual
+def step_account_create(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare account create '
+ '--location "West US 2" '
+ '--tags tag1="Red" tag2="White" '
+ '--name "{myAccount}" '
+ '--resource-group "{rg}"',
+ checks=[])
+ test.cmd('az datashare account wait --created '
+ '--name "{myAccount}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /Accounts/get/Accounts_Get
+@try_manual
+def step_account_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare account show '
+ '--name "{myAccount}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /Accounts/get/Accounts_ListByResourceGroup
+@try_manual
+def step_account_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare account list '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /Accounts/get/Accounts_ListBySubscription
+@try_manual
+def step_account_list2(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare account list '
+ '-g ""',
+ checks=checks)
+
+
+# EXAMPLE: /Accounts/patch/Accounts_Update
+@try_manual
+def step_account_update(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare account update '
+ '--name "{myAccount}" '
+ '--tags tag1="Red" tag2="White" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSetMappings/put/DataSetMappings_SqlDB_Create
+@try_manual
+def step_data_set_mapping_create(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set-mapping create '
+ '--account-name "{myAccount}" '
+ '--sqldb-table-data-set-mapping database-name="Database1" data-set-id="a08f184b-0567-4b11-ba22-a1199336d22'
+ '6" schema-name="dbo" sql-server-resource-id="/subscriptions/{subscription_id}/resourceGroups/{rg}/provide'
+ 'rs/Microsoft.Sql/servers/Server1" table-name="Table1" '
+ '--name "{myDataSetMapping}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSetMappings/put/DataSetMappings_SqlDW_Create
+@try_manual
+def step_data_set_mapping_create2(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set-mapping create '
+ '--account-name "{myAccount}" '
+ '--sqldw-table-data-set-mapping data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" '
+ 'data-warehouse-name="DataWarehouse1" schema-name="dbo" sql-server-resource-id="/subscriptions/{subscripti'
+ 'on_id}/resourceGroups/{rg}/providers/Microsoft.Sql/servers/Server1" table-name="Table1" '
+ '--name "{myDataSetMapping}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSetMappings/put/DataSetMappings_SqlDWDataSetToAdlsGen2File_Create
+@try_manual
+def step_data_set_mapping_create3(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set-mapping create '
+ '--account-name "{myAccount}" '
+ '--adls-gen2-file-data-set-mapping data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" file-path="file21" '
+ 'file-system="fileSystem" output-type="Csv" resource-group="{rg}" storage-account-name="storage2" '
+ 'subscription-id="433a8dfd-e5d5-4e77-ad86-90acdc75eb1a" '
+ '--name "{myDataSetMapping}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSetMappings/put/DataSetMappings_SynapseWorkspaceSqlPoolTable_Create
+@try_manual
+def step_data_set_mapping_create4(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set-mapping create '
+ '--account-name "{myAccount3}" '
+ '--synapse-workspace-sql-pool-table-data-set-mapping data-set-id="3dc64e49-1fc3-4186-b3dc-d388c4d3076a" '
+ 'synapse-workspace-sql-pool-table-resource-id="/subscriptions/{subscription_id}/resourceGroups/{rg}/provid'
+ 'ers/Microsoft.Synapse/workspaces/ExampleWorkspace/sqlPools/ExampleSqlPool/schemas/dbo/tables/table1" '
+ '--name "{myDataSetMapping2}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}"',
+ checks=checks)
+
+
+# EXAMPLE: /Shares/put/Shares_Create
+@try_manual
+def step_create(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare create '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--description "share description" '
+ '--share-kind "CopyBased" '
+ '--terms "Confidential" '
+ '--name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /Shares/get/Shares_Get
+@try_manual
+def step_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare show '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /Shares/get/Shares_ListByAccount
+@try_manual
+def step_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare list '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /Shares/post/Shares_ListSynchronizationDetails
+@try_manual
+def step_list_synchronization_detail(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare list-synchronization-detail '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--name "{myShare}" '
+ '--synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb"',
+ checks=checks)
+
+
+# EXAMPLE: /Shares/post/Shares_ListSynchronizations
+@try_manual
+def step_list_synchronization(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare list-synchronization '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSets/put/DataSets_Create
+@try_manual
+def step_data_set_create(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set create '
+ '--account-name "{myAccount}" '
+ '--blob-data-set container-name="C1" file-path="file21" resource-group="{rg}" '
+ 'storage-account-name="storage2" subscription-id="433a8dfd-e5d5-4e77-ad86-90acdc75eb1a" '
+ '--name "{myDataSet}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSets/put/DataSets_KustoCluster_Create
+@try_manual
+def step_data_set_create2(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set create '
+ '--account-name "{myAccount}" '
+ '--kusto-cluster-data-set kusto-cluster-resource-id="/subscriptions/{subscription_id}/resourceGroups/{rg}/'
+ 'providers/Microsoft.Kusto/clusters/Cluster1" '
+ '--name "{myDataSet}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSets/put/DataSets_KustoDatabase_Create
+@try_manual
+def step_data_set_create3(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set create '
+ '--account-name "{myAccount}" '
+ '--kusto-database-data-set kusto-database-resource-id="/subscriptions/{subscription_id}/resourceGroups/{rg'
+ '}/providers/Microsoft.Kusto/clusters/Cluster1/databases/Database1" '
+ '--name "{myDataSet}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSets/put/DataSets_SqlDBTable_Create
+@try_manual
+def step_data_set_create4(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set create '
+ '--account-name "{myAccount}" '
+ '--sqldb-table-data-set database-name="SqlDB1" schema-name="dbo" sql-server-resource-id="/subscriptions/{s'
+ 'ubscription_id}/resourceGroups/{rg}/providers/Microsoft.Sql/servers/Server1" table-name="Table1" '
+ '--name "{myDataSet}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSets/put/DataSets_SqlDWTable_Create
+@try_manual
+def step_data_set_create5(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set create '
+ '--account-name "{myAccount}" '
+ '--sqldw-table-data-set data-warehouse-name="DataWarehouse1" schema-name="dbo" '
+ 'sql-server-resource-id="/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Sql/serv'
+ 'ers/Server1" table-name="Table1" '
+ '--name "{myDataSet}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSets/put/DataSets_SynapseWorkspaceSqlPoolTable_Create
+@try_manual
+def step_data_set_create6(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set create '
+ '--account-name "{myAccount2}" '
+ '--synapse-workspace-sql-pool-table-data-set synapse-workspace-sql-pool-table-resource-id="/subscriptions/'
+ '{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Synapse/workspaces/ExampleWorkspace/sqlPools/Ex'
+ 'ampleSqlPool/schemas/dbo/tables/table1" '
+ '--name "{myDataSet2}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare2}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSets/get/DataSets_Get
+@try_manual
+def step_data_set_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set show '
+ '--account-name "{myAccount}" '
+ '--name "{myDataSet}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSets/get/DataSets_ListByShare
+@try_manual
+def step_data_set_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set list '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSets/delete/DataSets_Delete
+@try_manual
+def step_data_set_delete(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set delete -y '
+ '--account-name "{myAccount}" '
+ '--name "{myDataSet}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /Invitations/put/Invitations_Create
+@try_manual
+def step_invitation_create(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare invitation create '
+ '--account-name "{myAccount}" '
+ '--expiration-date "2020-08-26T22:33:24.5785265Z" '
+ '--target-email "receiver@microsoft.com" '
+ '--name "{myInvitation}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /Invitations/get/Invitations_Get
+@try_manual
+def step_invitation_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare invitation show '
+ '--account-name "{myAccount}" '
+ '--name "{myInvitation}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /Invitations/get/Invitations_ListByShare
+@try_manual
+def step_invitation_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare invitation list '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /Invitations/delete/Invitations_Delete
+@try_manual
+def step_invitation_delete(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare invitation delete -y '
+ '--account-name "{myAccount}" '
+ '--name "{myInvitation}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /ProviderShareSubscriptions/get/ProviderShareSubscriptions_GetByShare
+@try_manual
+def step_provider_share_subscription_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare provider-share-subscription show '
+ '--account-name "{myAccount}" '
+ '--provider-share-subscription-id "4256e2cf-0f82-4865-961b-12f83333f487" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /ProviderShareSubscriptions/get/ProviderShareSubscriptions_ListByShare
+@try_manual
+def step_provider_share_subscription_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare provider-share-subscription list '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /ProviderShareSubscriptions/post/ProviderShareSubscriptions_Adjust
+@try_manual
+def step_provider_share_subscription_adjust(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare provider-share-subscription adjust '
+ '--account-name "{myAccount}" '
+ '--expiration-date "2020-12-26T22:33:24.5785265Z" '
+ '--provider-share-subscription-id "4256e2cf-0f82-4865-961b-12f83333f487" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /ProviderShareSubscriptions/post/ProviderShareSubscriptions_Reinstate
+@try_manual
+def step_provider_share_subscription_reinstate(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare provider-share-subscription reinstate '
+ '--account-name "{myAccount}" '
+ '--expiration-date "2020-12-26T22:33:24.5785265Z" '
+ '--provider-share-subscription-id "4256e2cf-0f82-4865-961b-12f83333f487" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /ProviderShareSubscriptions/post/ProviderShareSubscriptions_Revoke
+@try_manual
+def step_provider_share_subscription_revoke(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare provider-share-subscription revoke '
+ '--account-name "{myAccount}" '
+ '--provider-share-subscription-id "4256e2cf-0f82-4865-961b-12f83333f487" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /Shares/delete/Shares_Delete
+@try_manual
+def step_delete(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare delete -y '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /ShareSubscriptions/put/ShareSubscriptions_Create
+@try_manual
+def step_share_subscription_create(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare share-subscription create '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--expiration-date "2020-08-26T22:33:24.5785265Z" '
+ '--invitation-id "12345678-1234-1234-12345678abd" '
+ '--source-share-location "eastus2" '
+ '--name "{myShareSubscription}"',
+ checks=checks)
+
+
+# EXAMPLE: /ShareSubscriptions/get/ShareSubscriptions_Get
+@try_manual
+def step_share_subscription_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare share-subscription show '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--name "{myShareSubscription}"',
+ checks=checks)
+
+
+# EXAMPLE: /ShareSubscriptions/get/ShareSubscriptions_ListByAccount
+@try_manual
+def step_share_subscription_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare share-subscription list '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /ShareSubscriptions/post/ShareSubscriptions_CancelSynchronization
+@try_manual
+def step_share_subscription_cancel_synchronization(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare share-subscription cancel-synchronization '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--name "{myShareSubscription}" '
+ '--synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb"',
+ checks=checks)
+
+
+# EXAMPLE: /ShareSubscriptions/post/ShareSubscriptions_ListSourceShareSynchronizationSettings
+@try_manual
+def step_share_subscription_list2(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare share-subscription list-source-share-synchronization-setting '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--name "{myShareSubscription2}"',
+ checks=checks)
+
+
+# EXAMPLE: /ShareSubscriptions/post/ShareSubscriptions_ListSynchronizationDetails
+@try_manual
+def step_share_subscription_list3(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare share-subscription list-synchronization-detail '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--name "{myShareSubscription2}" '
+ '--synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb"',
+ checks=checks)
+
+
+# EXAMPLE: /ShareSubscriptions/post/ShareSubscriptions_ListSynchronizations
+@try_manual
+def step_share_subscription_list_synchronization(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare share-subscription list-synchronization '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--name "{myShareSubscription2}"',
+ checks=checks)
+
+
+# EXAMPLE: /ShareSubscriptions/post/ShareSubscriptions_Synchronize
+@try_manual
+def step_share_subscription_synchronize(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare share-subscription synchronize '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--name "{myShareSubscription}" '
+ '--synchronization-mode "Incremental"',
+ checks=checks)
+
+
+# EXAMPLE: /ShareSubscriptions/delete/ShareSubscriptions_Delete
+@try_manual
+def step_share_subscription_delete(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare share-subscription delete -y '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--name "{myShareSubscription}"',
+ checks=checks)
+
+
+# EXAMPLE: /SynchronizationSettings/put/SynchronizationSettings_Create
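+# Note: this example creates the synchronization setting under the name
+# '{myDataSet}', while the show/list/delete steps below address
+# '{mySynchronizationSetting}'.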
+@try_manual
+def step_synchronization_setting_create(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare synchronization-setting create '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}" '
+             '--scheduled-synchronization-setting recurrence-interval="Day" '
+             'synchronization-time="2018-11-14T04:47:52.9614956Z" '
+ '--name "{myDataSet}"',
+ checks=checks)
+
+
+# EXAMPLE: /SynchronizationSettings/get/SynchronizationSettings_Get
+@try_manual
+def step_synchronization_setting_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare synchronization-setting show '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}" '
+ '--name "{mySynchronizationSetting}"',
+ checks=checks)
+
+
+# EXAMPLE: /SynchronizationSettings/get/SynchronizationSettings_ListByShare
+@try_manual
+def step_synchronization_setting_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare synchronization-setting list '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /SynchronizationSettings/delete/SynchronizationSettings_Delete
+@try_manual
+def step_synchronization_setting_delete(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare synchronization-setting delete -y '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-name "{myShare}" '
+ '--name "{mySynchronizationSetting}"',
+ checks=checks)
+
+
+# EXAMPLE: /Triggers/put/Triggers_Create
+@try_manual
+def step_trigger_create(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare trigger create '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}" '
+ '--scheduled-trigger recurrence-interval="Day" synchronization-mode="Incremental" '
+ 'synchronization-time="2018-11-14T04:47:52.9614956Z" '
+ '--name "{myTrigger}"',
+ checks=checks)
+
+
+# EXAMPLE: /Triggers/get/Triggers_Get
+@try_manual
+def step_trigger_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare trigger show '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}" '
+ '--name "{myTrigger}"',
+ checks=checks)
+
+
+# EXAMPLE: /Triggers/get/Triggers_ListByShareSubscription
+@try_manual
+def step_trigger_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare trigger list '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}"',
+ checks=checks)
+
+
+# EXAMPLE: /Triggers/delete/Triggers_Delete
+@try_manual
+def step_trigger_delete(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare trigger delete -y '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}" '
+ '--name "{myTrigger}"',
+ checks=checks)
+
+
+# EXAMPLE: /Accounts/delete/Accounts_Delete
+@try_manual
+def step_account_delete(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare account delete -y '
+ '--name "{myAccount}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /ConsumerInvitations/get/ConsumerInvitations_Get
+@try_manual
+def step_consumer_invitation_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare consumer-invitation show '
+ '--invitation-id "dfbbc788-19eb-4607-a5a1-c74181bfff03" '
+ '--location "East US 2"',
+ checks=checks)
+
+
+# EXAMPLE: /ConsumerInvitations/get/ConsumerInvitations_ListInvitations
+@try_manual
+def step_consumer_invitation_list_invitation(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare consumer-invitation list-invitation',
+ checks=checks)
+
+
+# EXAMPLE: /ConsumerInvitations/post/ConsumerInvitations_RejectInvitation
+@try_manual
+def step_consumer_invitation_reject_invitation(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare consumer-invitation reject-invitation '
+ '--invitation-id "dfbbc788-19eb-4607-a5a1-c74181bfff03" '
+ '--location "East US 2"',
+ checks=checks)
+
+
+# EXAMPLE: /ConsumerSourceDataSets/get/ConsumerSourceDataSets_ListByShareSubscription
+@try_manual
+def step_consumer_source_data_set_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare consumer-source-data-set list '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShare}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSetMappings/put/DataSetMappings_Create
+@try_manual
+def step_data_set_mapping_create5(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set-mapping create '
+ '--account-name "{myAccount}" '
+ '--blob-data-set-mapping container-name="C1" data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" '
+             'file-path="file21" resource-group="{rg}" storage-account-name="storage2" '
+             'subscription-id="433a8dfd-e5d5-4e77-ad86-90acdc75eb1a" '
+ '--name "{myDataSetMapping}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSetMappings/get/DataSetMappings_Get
+@try_manual
+def step_data_set_mapping_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set-mapping show '
+ '--account-name "{myAccount}" '
+ '--name "{myDataSetMapping}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSetMappings/get/DataSetMappings_ListByShareSubscription
+@try_manual
+def step_data_set_mapping_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set-mapping list '
+ '--account-name "{myAccount}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}"',
+ checks=checks)
+
+
+# EXAMPLE: /DataSetMappings/delete/DataSetMappings_Delete
+@try_manual
+def step_data_set_mapping_delete(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datashare data-set-mapping delete -y '
+ '--account-name "{myAccount}" '
+ '--name "{myDataSetMapping}" '
+ '--resource-group "{rg}" '
+ '--share-subscription-name "{myShareSubscription}"',
+ checks=checks)
+
diff --git a/src/datashare/azext_datashare/tests/latest/test_datashare_scenario.py b/src/datashare/azext_datashare/tests/latest/test_datashare_scenario.py
index 2e9b8747434..815242e0da8 100644
--- a/src/datashare/azext_datashare/tests/latest/test_datashare_scenario.py
+++ b/src/datashare/azext_datashare/tests/latest/test_datashare_scenario.py
@@ -1,599 +1,273 @@
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-
-import os
-import unittest
-
-from azure_devtools.scenario_tests import AllowLargeResponse
-from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer, StorageAccountPreparer)
-
-TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
-
-
-class DataShareManagementClientScenarioTest(ScenarioTest):
-
- @ResourceGroupPreparer(name_prefix='cli_test_datashare_provider_rg'[:12], location='westus2', key='ProviderResourceGroup')
- @StorageAccountPreparer(name_prefix='clitestdatashareprovidersa'[:12], location='westus2', key='ProviderStorageAccount')
- @AllowLargeResponse()
- def test_datashare(self, resource_group, storage_account):
-
- self.kwargs.update({
- 'ConsumerSubscription': '00000000-0000-0000-0000-000000000000', # change this value in live test
- 'ConsumerResourceGroup': 'datashare_consumer_rg', # this is a pre-existing reosurce group in consumer subscription
- 'ConsumerStorageAccount': 'datashareconsumersa', # this is a pre-existing storage account in consumer subscription
- 'ProviderEmail': 'provider@microsoft.com', # change this value in live test
- 'ConsumerEmail': 'consumer@microsoft.com', # change this value in live test
- 'ProviderAccount': 'cli_test_account',
- 'ConsumerAccount': 'cli_test_consumer_account',
- 'ProviderDataset': 'cli_test_data_set',
- 'ConsumerDatasetMapping': 'cli_test_data_set_mapping',
- 'ProviderInvitation': 'cli_test_invitation',
- 'ProviderShare': 'cli_test_share',
- 'ConsumerShareSubscription': 'cli_test_share_subscription',
- 'ProviderSynchronizationSetting': 'cli_test_synchronization_setting',
- 'ConsumerTrigger': 'cli_test_trigger',
- 'ProviderContainer': 'clitestcontainer',
- 'ConsumerContainer': 'clitestconsumercontainer',
- })
-
- # Provider commands
- datashareAccount = self.cmd('az datashare account create '
- '--location "West US 2" '
- '--tags tag1=Red tag2=White '
- '--name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}"',
- checks=[self.check('name', '{ProviderAccount}'),
- self.check('location', 'westus2'),
- self.check('resourceGroup', '{ProviderResourceGroup}'),
- self.check('tags.tag1', 'Red'),
- self.check('tags.tag2', 'White')
- ]).get_output_in_json()
-
- self.cmd('az datashare account wait '
- '--name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--created',
- checks=[])
-
- accountId = datashareAccount['id']
- self.cmd('az datashare account show '
- '-n "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}"',
- checks=[self.check('name', '{ProviderAccount}'),
- self.check('location', 'westus2'),
- self.check('provisioningState', 'Succeeded'),
- self.check('resourceGroup', '{ProviderResourceGroup}'),
- self.check('tags.tag1', 'Red'),
- self.check('tags.tag2', 'White')
- ])
-
- self.cmd('az datashare account show '
- '--ids {}'.format(accountId),
- checks=[self.check('name', '{ProviderAccount}'),
- self.check('location', 'westus2'),
- self.check('provisioningState', 'Succeeded'),
- self.check('resourceGroup', '{ProviderResourceGroup}'),
- self.check('tags.tag1', 'Red'),
- self.check('tags.tag2', 'White')
- ])
-
- self.cmd('az datashare account list '
- '--resource-group "{ProviderResourceGroup}"',
- checks=[self.check("[?id=='{}'].name | [0]".format(accountId), '{ProviderAccount}'),
- self.check("[?id=='{}'].location | [0]".format(accountId), 'westus2'),
- self.check("[?id=='{}'].resourceGroup | [0]".format(accountId), '{ProviderResourceGroup}'),
- self.check("[?id=='{}'].tags | [0].tag1".format(accountId), 'Red'),
- self.check("[?id=='{}'].tags | [0].tag2".format(accountId), 'White')])
-
- self.cmd('az datashare account update '
- '--name "{ProviderAccount}" '
- '--tags tag1=Green '
- '--resource-group "{ProviderResourceGroup}"',
- checks=[self.check('name', '{ProviderAccount}'),
- self.check('location', 'westus2'),
- self.check('provisioningState', 'Succeeded'),
- self.check('resourceGroup', '{ProviderResourceGroup}'),
- self.check('tags.tag1', 'Green')])
-
- datashare = self.cmd('az datashare create '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--description "share description" '
- '--share-kind "CopyBased" '
- '--terms "Confidential" '
- '--name "{ProviderShare}"',
- checks=[self.check('name', '{ProviderShare}'),
- self.check('description', 'share description'),
- self.check('shareKind', 'CopyBased'),
- self.check('terms', 'Confidential')]).get_output_in_json()
-
- self.cmd('az datashare show '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--name "{ProviderShare}"',
- checks=[self.check('name', '{ProviderShare}'),
- self.check('description', 'share description'),
- self.check('shareKind', 'CopyBased'),
- self.check('terms', 'Confidential')])
-
- datashareId = datashare['id']
- self.cmd('az datashare show '
- '--ids {}'.format(datashareId),
- checks=[self.check('name', '{ProviderShare}'),
- self.check('description', 'share description'),
- self.check('shareKind', 'CopyBased'),
- self.check('terms', 'Confidential')])
-
- self.cmd('az datashare list '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}"',
- checks=[self.check("[?id=='{}'].name | [0]".format(datashareId), '{ProviderShare}'),
- self.check("[?id=='{}'].description | [0]".format(datashareId), 'share description'),
- self.check("[?id=='{}'].shareKind | [0]".format(datashareId), 'CopyBased'),
- self.check("[?id=='{}'].terms | [0]".format(datashareId), 'Confidential')])
-
- storage_account_json = self.cmd('az storage account show '
- '-n {ProviderStorageAccount} '
- '-g {ProviderResourceGroup}').get_output_in_json()
-
- accountPrincipalId = datashareAccount['identity']['principalId']
- if self.is_live or self.in_recording:
- import time
- self.cmd('az role assignment create '
- '--role "2a2b9908-6ea1-4ae2-8e65-a410df84e7d1" ' # Storage Blob Data Reader
- '--assignee-object-id {} '
- '--assignee-principal-type ServicePrincipal '
- '--scope {}'.format(accountPrincipalId, storage_account_json['id']))
- time.sleep(10)
-
- self.cmd('az storage container create '
- '--account-name {ProviderStorageAccount} '
- '--name {ProviderContainer}')
-
- datasetContent = {"container_name": "{}".format(self.kwargs.get('ProviderContainer', '')), "storage_account_name": "{}".format(storage_account), "kind": "Container"}
- self.kwargs.update({
- 'ProviderDatasetContent': datasetContent
- })
- self.cmd('az datashare dataset create '
- '--account-name "{ProviderAccount}" '
- '--dataset "{ProviderDatasetContent}" '
- '--name "{ProviderDataset}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[self.check('containerName', '{ProviderContainer}'),
- self.check('storageAccountName', '{ProviderStorageAccount}'),
- self.check('kind', 'Container'),
- self.check('name', '{ProviderDataset}')])
-
- self.cmd('az datashare dataset show '
- '--account-name "{ProviderAccount}" '
- '--name "{ProviderDataset}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[self.check('containerName', '{ProviderContainer}'),
- self.check('storageAccountName', '{ProviderStorageAccount}'),
- self.check('kind', 'Container'),
- self.check('name', '{ProviderDataset}')])
-
- self.cmd('az datashare dataset list '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[self.check('[0].containerName', '{ProviderContainer}'),
- self.check('[0].storageAccountName', '{ProviderStorageAccount}'),
- self.check('[0].kind', 'Container'),
- self.check('[0].name', '{ProviderDataset}')])
-
- self.cmd('az datashare synchronization-setting create '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}" '
- '--name "{ProviderSynchronizationSetting}" '
- '--recurrence-interval "Day" '
- '--synchronization-time "2020-04-05 10:50:00 +00:00"',
- checks=[self.check('kind', 'ScheduleBased'),
- self.check('name', '{ProviderSynchronizationSetting}'),
- self.check('recurrenceInterval', 'Day'),
- self.check('resourceGroup', '{ProviderResourceGroup}'),
- self.check('synchronizationTime', '2020-04-05T10:50:00+00:00')])
-
- self.cmd('az datashare synchronization-setting show '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}" '
- '--name "{ProviderSynchronizationSetting}"',
- checks=[self.check('kind', 'ScheduleBased'),
- self.check('name', '{ProviderSynchronizationSetting}'),
- self.check('recurrenceInterval', 'Day'),
- self.check('resourceGroup', '{ProviderResourceGroup}'),
- self.check('synchronizationTime', '2020-04-05T10:50:00+00:00')])
-
- self.cmd('az datashare synchronization-setting list '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[self.check('[0].kind', 'ScheduleBased'),
- self.check('[0].name', '{ProviderSynchronizationSetting}'),
- self.check('[0].recurrenceInterval', 'Day'),
- self.check('[0].resourceGroup', '{ProviderResourceGroup}'),
- self.check('[0].synchronizationTime', '2020-04-05T10:50:00+00:00')])
-
- self.cmd('az datashare synchronization list '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[])
-
- # self.cmd('az datashare synchronization list-detail '
- # '--account-name "{ProviderAccount}" '
- # '--resource-group "{ProviderResourceGroup}" '
- # '--share-name "{ProviderShare}" '
- # '--synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb"',
- # checks=[])
-
- self.cmd('az datashare invitation create '
- '--account-name "{ProviderAccount}" '
- '--target-email "{ConsumerEmail}" '
- '--name "{ProviderInvitation}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[self.check('invitationStatus', 'Pending'),
- self.check('name', '{ProviderInvitation}'),
- self.check('resourceGroup', '{ProviderResourceGroup}'),
- self.check('targetEmail', '{ConsumerEmail}')])
-
- self.cmd('az datashare invitation list '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[self.check('[0].invitationStatus', 'Pending'),
- self.check('[0].name', '{ProviderInvitation}'),
- self.check('[0].resourceGroup', '{ProviderResourceGroup}'),
- self.check('[0].targetEmail', '{ConsumerEmail}')])
-
- self.cmd('az datashare invitation show '
- '--account-name "{ProviderAccount}" '
- '--name "{ProviderInvitation}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[self.check('invitationStatus', 'Pending'),
- self.check('name', '{ProviderInvitation}'),
- self.check('resourceGroup', '{ProviderResourceGroup}'),
- self.check('targetEmail', '{ConsumerEmail}')])
-
- # Consumer commands
- datashareConsumerAccount = self.cmd('az datashare account create '
- '--location "West US 2" '
- '--name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('name', '{ConsumerAccount}'),
- self.check('location', 'westus2'),
- self.check('resourceGroup', '{ConsumerResourceGroup}')]).get_output_in_json()
-
- invitations = self.cmd('az datashare consumer invitation list '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('[0].invitationStatus', 'Pending'),
- self.check('[0].name', '{ProviderInvitation}'),
- self.check('[0].shareName', '{ProviderShare}'),
- self.check('[0].providerEmail', '{ProviderEmail}')]).get_output_in_json()
-
- invitationId = invitations[0]['invitationId']
- sourceShareLocation = invitations[0]['location']
- self.kwargs.update({'InvitationId1': invitationId,
- 'Location1': sourceShareLocation})
-
- self.cmd('az datashare consumer invitation show '
- '--invitation-id "{InvitationId1}" '
- '--subscription "{ConsumerSubscription}" '
- '--location "{Location1}"',
- checks=[self.check('invitationStatus', 'Pending'),
- self.check('name', '{ProviderInvitation}'),
- self.check('shareName', '{ProviderShare}'),
- self.check('providerEmail', '{ProviderEmail}')])
-
-# self.cmd('az datashare consumer invitation reject '
-# '--invitation-id 00000000-0000-0000-0000-000000000000 '
-# checks=[])
-
- self.cmd('az datashare account wait '
- '--name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--created '
- '--subscription "{ConsumerSubscription}"',
- checks=[])
-
- self.cmd('az datashare consumer share-subscription create '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--invitation-id "{InvitationId1}" '
- '--source-share-location "{Location1}" '
- '--name "{ConsumerShareSubscription}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('invitationId', '{InvitationId1}'),
- self.check('name', '{ConsumerShareSubscription}'),
- self.check('resourceGroup', '{ConsumerResourceGroup}'),
- self.check('shareName', '{ProviderShare}'),
- self.check('shareKind', 'CopyBased'),
- self.check('sourceShareLocation', '{Location1}')])
-
- self.cmd('az datashare consumer share-subscription show '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--name "{ConsumerShareSubscription}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('invitationId', '{InvitationId1}'),
- self.check('name', '{ConsumerShareSubscription}'),
- self.check('resourceGroup', '{ConsumerResourceGroup}'),
- self.check('shareName', '{ProviderShare}'),
- self.check('shareKind', 'CopyBased'),
- self.check('sourceShareLocation', '{Location1}')])
-
- self.cmd('az datashare consumer share-subscription list '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('[0].invitationId', '{InvitationId1}'),
- self.check('[0].name', '{ConsumerShareSubscription}'),
- self.check('[0].resourceGroup', '{ConsumerResourceGroup}'),
- self.check('[0].shareName', '{ProviderShare}'),
- self.check('[0].shareKind', 'CopyBased'),
- self.check('[0].sourceShareLocation', '{Location1}')])
-
- sourceDatasets = self.cmd('az datashare consumer share-subscription list-source-dataset '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('[0].dataSetName', '{ProviderDataset}'),
- self.check('[0].dataSetType', 'Container')]).get_output_in_json()
- sourceDatasetId = sourceDatasets[0]['dataSetId']
-
- storage_account2_json = self.cmd('az storage account show '
- '-n {ConsumerStorageAccount} '
- '-g {ConsumerResourceGroup} '
- '--subscription "{ConsumerSubscription}"').get_output_in_json()
-
- accountPrincipalId2 = datashareConsumerAccount['identity']['principalId']
- self.kwargs.update({
- "AccountPrincipalId2": accountPrincipalId2,
- "StorageAccountId2": storage_account2_json['id']})
-
- if self.is_live or self.in_recording:
- import time
- self.cmd('az role assignment create '
- '--role "ba92f5b4-2d11-453d-a403-e96b0029c9fe" ' # Storage Blob Data Contributor
- '--assignee-object-id "{AccountPrincipalId2}" '
- '--assignee-principal-type ServicePrincipal '
- '--scope "{StorageAccountId2}" '
- '--subscription "{ConsumerSubscription}"')
- time.sleep(10)
-
- datasetMappingContent = {"data_set_id": "{}".format(sourceDatasetId),
- "container_name": "{}".format(self.kwargs.get('ConsumerContainer', '')),
- "storage_account_name": "{}".format(self.kwargs.get('ConsumerStorageAccount', '')),
- "kind": "BlobFolder",
- "prefix": "{}".format(self.kwargs.get('ProviderDataset', ''))}
- self.kwargs.update({
- 'ConsumerDatasetMappingContent': datasetMappingContent
- })
- self.cmd('az datashare consumer dataset-mapping create '
- '--account-name "{ConsumerAccount}" '
- '--name "{ConsumerDatasetMapping}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--mapping "{ConsumerDatasetMappingContent}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('kind', 'BlobFolder'),
- self.check('name', '{ConsumerDatasetMapping}'),
- self.check('prefix', '{ProviderDataset}'),
- self.check('storageAccountName', '{ConsumerStorageAccount}')])
-
- self.cmd('az datashare consumer share-subscription synchronization start '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--synchronization-mode "Incremental" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('status', 'Queued'),
- self.check('synchronizationMode', 'Incremental')])
-
- self.cmd('az datashare consumer dataset-mapping show '
- '--account-name "{ConsumerAccount}" '
- '--name "{ConsumerDatasetMapping}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('kind', 'BlobFolder'),
- self.check('name', '{ConsumerDatasetMapping}'),
- self.check('prefix', '{ProviderDataset}'),
- self.check('storageAccountName', '{ConsumerStorageAccount}')])
-
- self.cmd('az datashare consumer dataset-mapping list '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('[0].kind', 'BlobFolder'),
- self.check('[0].name', '{ConsumerDatasetMapping}'),
- self.check('[0].prefix', '{ProviderDataset}'),
- self.check('[0].storageAccountName', '{ConsumerStorageAccount}')])
-
- self.cmd('az datashare consumer share-subscription synchronization list '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('[0].synchronizationMode', 'Incremental')])
-
-# self.cmd('az datashare consumer share-subscription synchronization list-detail '
-# '--account-name "{ConsumerAccount}" '
-# '--resource-group "{ConsumerResourceGroup}" '
-# '--share-subscription-name "{ConsumerShareSubscription}" '
-# '--synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb" '
-# '--subscription "{ConsumerSubscription}"',
-# checks=[])
-
-# self.cmd('az datashare consumer share-subscription synchronization cancel '
-# '--account-name "{ConsumerAccount}" '
-# '--resource-group "{ConsumerResourceGroup}" '
-# '--share-subscription-name "{ConsumerShareSubscription}" '
-# '--synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb" '
-# '--subscription "{ConsumerSubscription}"',
-# checks=[])
-
- self.cmd('az datashare consumer share-subscription list-source-share-synchronization-setting '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('[0].recurrenceInterval', 'Day'),
- self.check('[0].kind', 'ScheduleBased')])
-
- self.cmd('az datashare consumer trigger create '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--name "{ConsumerTrigger}" '
- '--recurrence-interval "Day" '
- '--synchronization-time "2020-04-05 10:50:00 +00:00" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('properties.recurrenceInterval', 'Day'), # TODO properties is not removed in the response structure
- self.check('properties.synchronizationMode', 'Incremental')])
-
- self.cmd('az datashare consumer trigger show '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--name "{ConsumerTrigger}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('recurrenceInterval', 'Day'),
- self.check('synchronizationMode', 'Incremental')])
-
- self.cmd('az datashare consumer trigger list '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--subscription "{ConsumerSubscription}"',
- checks=[self.check('[0].recurrenceInterval', 'Day'),
- self.check('[0].synchronizationMode', 'Incremental')])
-
- # Provider commands
- providerShareSubscriptions = self.cmd('az datashare provider-share-subscription list '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[self.check('[0].consumerEmail', '{ConsumerEmail}'),
- self.check('[0].providerEmail', '{ProviderEmail}'),
- self.check('[0].shareSubscriptionStatus', 'Active'),
- self.check('[0].name', '{ConsumerShareSubscription}')]).get_output_in_json()
- shareSubscriptionObjectId = providerShareSubscriptions[0]['shareSubscriptionObjectId']
- self.kwargs.update({'ProviderShareSubscriptionObjectId': shareSubscriptionObjectId})
-
- self.cmd('az datashare provider-share-subscription show '
- '--account-name "{ProviderAccount}" '
- '--share-subscription "{ProviderShareSubscriptionObjectId}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[self.check('consumerEmail', '{ConsumerEmail}'),
- self.check('providerEmail', '{ProviderEmail}'),
- self.check('shareSubscriptionStatus', 'Active'),
- self.check('name', '{ConsumerShareSubscription}'),
- self.check('shareSubscriptionObjectId', '{ProviderShareSubscriptionObjectId}')])
-
- self.cmd('az datashare provider-share-subscription revoke '
- '--account-name "{ProviderAccount}" '
- '--share-subscription "{ProviderShareSubscriptionObjectId}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[self.check('consumerEmail', '{ConsumerEmail}'),
- self.check('providerEmail', '{ProviderEmail}'),
- self.check('shareSubscriptionStatus', 'Revoking'),
- self.check('name', '{ConsumerShareSubscription}'),
- self.check('shareSubscriptionObjectId', '{ProviderShareSubscriptionObjectId}')])
-
- if self.is_live or self.in_recording:
- import time
- time.sleep(5)
-
- self.cmd('az datashare provider-share-subscription reinstate '
- '--account-name "{ProviderAccount}" '
- '--share-subscription "{ProviderShareSubscriptionObjectId}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}"',
- checks=[self.check('consumerEmail', '{ConsumerEmail}'),
- self.check('providerEmail', '{ProviderEmail}'),
- self.check('shareSubscriptionStatus', 'Active'),
- self.check('name', '{ConsumerShareSubscription}'),
- self.check('shareSubscriptionObjectId', '{ProviderShareSubscriptionObjectId}')])
-
- # Provider Clean up
- self.cmd('az datashare synchronization-setting delete '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}" '
- '--name "{ProviderSynchronizationSetting}" '
- '--yes',
- checks=[])
-
- # self.cmd('az datashare invitation delete '
- # '--account-name "{ProviderAccount}" '
- # '--name "{ProviderInvitation}" '
- # '--resource-group "{ProviderResourceGroup}" '
- # '--share-name "{ProviderShare}"',
- # checks=[])
-
- self.cmd('az datashare dataset delete '
- '--account-name "{ProviderAccount}" '
- '--name "{ProviderDataset}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--share-name "{ProviderShare}" '
- '--yes',
- checks=[])
-
- self.cmd('az datashare delete '
- '--account-name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--name "{ProviderShare}" '
- '--yes',
- checks=[])
-
- self.cmd('az datashare account delete '
- '--name "{ProviderAccount}" '
- '--resource-group "{ProviderResourceGroup}" '
- '--no-wait '
- '--yes',
- checks=[])
-
- self.cmd('az datashare consumer trigger delete '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--name "{ConsumerTrigger}" '
- '--yes '
- '--subscription "{ConsumerSubscription}"',
- checks=[])
- self.cmd('az datashare consumer dataset-mapping delete '
- '--account-name "{ConsumerAccount}" '
- '--name "{ConsumerDatasetMapping}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--share-subscription-name "{ConsumerShareSubscription}" '
- '--yes '
- '--subscription "{ConsumerSubscription}"',
- checks=[])
- self.cmd('az datashare consumer share-subscription delete '
- '--account-name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--name "{ConsumerShareSubscription}" '
- '--yes '
- '--subscription "{ConsumerSubscription}"',
- checks=[])
- self.cmd('az datashare account delete '
- '--name "{ConsumerAccount}" '
- '--resource-group "{ConsumerResourceGroup}" '
- '--no-wait '
- '--yes '
- '--subscription "{ConsumerSubscription}"',
- checks=[])
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+import os
+from azure.cli.testsdk import ScenarioTest
+from azure.cli.testsdk import ResourceGroupPreparer
+from .example_steps import step_account_create
+from .example_steps import step_account_show
+from .example_steps import step_account_list
+from .example_steps import step_account_list2
+from .example_steps import step_account_update
+from .example_steps import step_data_set_mapping_create
+from .example_steps import step_data_set_mapping_create2
+from .example_steps import step_data_set_mapping_create3
+from .example_steps import step_data_set_mapping_create4
+from .example_steps import step_create
+from .example_steps import step_show
+from .example_steps import step_list
+from .example_steps import step_list_synchronization_detail
+from .example_steps import step_list_synchronization
+from .example_steps import step_data_set_create
+from .example_steps import step_data_set_create2
+from .example_steps import step_data_set_create3
+from .example_steps import step_data_set_create4
+from .example_steps import step_data_set_create5
+from .example_steps import step_data_set_create6
+from .example_steps import step_data_set_show
+from .example_steps import step_data_set_list
+from .example_steps import step_data_set_delete
+from .example_steps import step_invitation_create
+from .example_steps import step_invitation_show
+from .example_steps import step_invitation_list
+from .example_steps import step_invitation_delete
+from .example_steps import step_provider_share_subscription_show
+from .example_steps import step_provider_share_subscription_list
+from .example_steps import step_provider_share_subscription_adjust
+from .example_steps import step_provider_share_subscription_reinstate
+from .example_steps import step_provider_share_subscription_revoke
+from .example_steps import step_delete
+from .example_steps import step_share_subscription_create
+from .example_steps import step_share_subscription_show
+from .example_steps import step_share_subscription_list
+from .example_steps import step_share_subscription_cancel_synchronization
+from .example_steps import step_share_subscription_list2
+from .example_steps import step_share_subscription_list3
+from .example_steps import step_share_subscription_list_synchronization
+from .example_steps import step_share_subscription_synchronize
+from .example_steps import step_share_subscription_delete
+from .example_steps import step_synchronization_setting_create
+from .example_steps import step_synchronization_setting_show
+from .example_steps import step_synchronization_setting_list
+from .example_steps import step_synchronization_setting_delete
+from .example_steps import step_trigger_create
+from .example_steps import step_trigger_show
+from .example_steps import step_trigger_list
+from .example_steps import step_trigger_delete
+from .example_steps import step_account_delete
+from .example_steps import step_consumer_invitation_show
+from .example_steps import step_consumer_invitation_list_invitation
+from .example_steps import step_consumer_invitation_reject_invitation
+from .example_steps import step_consumer_source_data_set_list
+from .example_steps import step_data_set_mapping_create5
+from .example_steps import step_data_set_mapping_show
+from .example_steps import step_data_set_mapping_list
+from .example_steps import step_data_set_mapping_delete
+from .. import (
+ try_manual,
+ raise_if,
+ calc_coverage
+)
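+# try_manual, raise_if and calc_coverage come from this extension's generated
+# tests/__init__.py. In that scaffolding, try_manual runs a manual override of a
+# step when one is defined and records exceptions instead of failing immediately,
+# raise_if re-raises any recorded exception at the end of the run, and
+# calc_coverage writes a summary of which example steps executed (behavior
+# summarized from the generated test helpers).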
+
+
+TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
+
+
+# Env setup_scenario
+@try_manual
+def setup_scenario(test, rg):
+ pass
+
+
+# Env cleanup_scenario
+@try_manual
+def cleanup_scenario(test, rg):
+ pass
+
+
+# Testcase: Scenario
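+# call_scenario chains the generated example steps end to end: account,
+# data set mapping, share, data set, invitation, provider share subscription,
+# share subscription, synchronization setting, trigger and consumer-side steps,
+# deleting what it creates along the way.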
+@try_manual
+def call_scenario(test, rg):
+ setup_scenario(test, rg)
+ step_account_create(test, rg, checks=[
+ test.check("location", "West US 2", case_sensitive=False),
+ test.check("tags.tag1", "Red", case_sensitive=False),
+ test.check("tags.tag2", "White", case_sensitive=False),
+ test.check("name", "{myAccount}", case_sensitive=False),
+ ])
+ step_account_show(test, rg, checks=[
+ test.check("location", "West US 2", case_sensitive=False),
+ test.check("tags.tag1", "Red", case_sensitive=False),
+ test.check("tags.tag2", "White", case_sensitive=False),
+ test.check("name", "{myAccount}", case_sensitive=False),
+ ])
+ step_account_list(test, rg, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_account_list2(test, rg, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_account_update(test, rg, checks=[
+ test.check("location", "West US 2", case_sensitive=False),
+ test.check("tags.tag1", "Red", case_sensitive=False),
+ test.check("tags.tag2", "White", case_sensitive=False),
+ test.check("name", "{myAccount}", case_sensitive=False),
+ ])
+ step_data_set_mapping_create(test, rg, checks=[
+ test.check("name", "{myDataSetMapping}", case_sensitive=False),
+ ])
+ step_data_set_mapping_create2(test, rg, checks=[
+ test.check("name", "{myDataSetMapping}", case_sensitive=False),
+ ])
+ step_data_set_mapping_create3(test, rg, checks=[
+ test.check("name", "{myDataSetMapping}", case_sensitive=False),
+ ])
+ step_data_set_mapping_create4(test, rg, checks=[])
+ step_create(test, rg, checks=[
+ test.check("description", "share description", case_sensitive=False),
+ test.check("shareKind", "CopyBased", case_sensitive=False),
+ test.check("terms", "Confidential", case_sensitive=False),
+ test.check("name", "{myShare}", case_sensitive=False),
+ ])
+ step_show(test, rg, checks=[
+ test.check("description", "share description", case_sensitive=False),
+ test.check("shareKind", "CopyBased", case_sensitive=False),
+ test.check("terms", "Confidential", case_sensitive=False),
+ test.check("name", "{myShare}", case_sensitive=False),
+ ])
+ step_list(test, rg, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_list_synchronization_detail(test, rg, checks=[])
+ step_list_synchronization(test, rg, checks=[])
+ step_data_set_create(test, rg, checks=[])
+ step_data_set_create2(test, rg, checks=[])
+ step_data_set_create3(test, rg, checks=[])
+ step_data_set_create4(test, rg, checks=[])
+ step_data_set_create5(test, rg, checks=[])
+ step_data_set_create6(test, rg, checks=[
+ test.check("name", "{myDataSet2}", case_sensitive=False),
+ ])
+ step_data_set_show(test, rg, checks=[
+ test.check("name", "{myDataSet}", case_sensitive=False),
+ ])
+ step_data_set_list(test, rg, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_data_set_delete(test, rg, checks=[])
+ step_invitation_create(test, rg, checks=[
+ test.check("expirationDate", "2020-08-26T22:33:24.5785265Z", case_sensitive=False),
+ test.check("targetEmail", "receiver@microsoft.com", case_sensitive=False),
+ test.check("name", "{myInvitation}", case_sensitive=False),
+ ])
+ step_invitation_show(test, rg, checks=[
+ test.check("expirationDate", "2020-08-26T22:33:24.5785265Z", case_sensitive=False),
+ test.check("targetEmail", "receiver@microsoft.com", case_sensitive=False),
+ test.check("name", "{myInvitation}", case_sensitive=False),
+ ])
+ step_invitation_list(test, rg, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_invitation_delete(test, rg, checks=[])
+ step_provider_share_subscription_show(test, rg, checks=[])
+ step_provider_share_subscription_list(test, rg, checks=[])
+ step_provider_share_subscription_adjust(test, rg, checks=[])
+ step_provider_share_subscription_reinstate(test, rg, checks=[])
+ step_provider_share_subscription_revoke(test, rg, checks=[])
+ step_delete(test, rg, checks=[])
+ step_share_subscription_create(test, rg, checks=[
+ test.check("expirationDate", "2020-08-26T22:33:24.5785265Z", case_sensitive=False),
+ test.check("sourceShareLocation", "eastus2", case_sensitive=False),
+ test.check("name", "{myShareSubscription}", case_sensitive=False),
+ ])
+ step_share_subscription_show(test, rg, checks=[
+ test.check("expirationDate", "2020-08-26T22:33:24.5785265Z", case_sensitive=False),
+ test.check("sourceShareLocation", "eastus2", case_sensitive=False),
+ test.check("name", "{myShareSubscription}", case_sensitive=False),
+ ])
+ step_share_subscription_list(test, rg, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_share_subscription_cancel_synchronization(test, rg, checks=[])
+ step_share_subscription_list2(test, rg, checks=[])
+ step_share_subscription_list3(test, rg, checks=[])
+ step_share_subscription_list_synchronization(test, rg, checks=[])
+ step_share_subscription_synchronize(test, rg, checks=[])
+ step_share_subscription_delete(test, rg, checks=[])
+ step_synchronization_setting_create(test, rg, checks=[])
+ step_synchronization_setting_show(test, rg, checks=[])
+ step_synchronization_setting_list(test, rg, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_synchronization_setting_delete(test, rg, checks=[])
+ step_trigger_create(test, rg, checks=[
+ test.check("name", "{myTrigger}", case_sensitive=False),
+ ])
+ step_trigger_show(test, rg, checks=[
+ test.check("name", "{myTrigger}", case_sensitive=False),
+ ])
+ step_trigger_list(test, rg, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_trigger_delete(test, rg, checks=[])
+ step_account_delete(test, rg, checks=[])
+ step_consumer_invitation_show(test, rg, checks=[])
+ step_consumer_invitation_list_invitation(test, rg, checks=[])
+ step_consumer_invitation_reject_invitation(test, rg, checks=[])
+ step_consumer_source_data_set_list(test, rg, checks=[])
+ step_data_set_mapping_create5(test, rg, checks=[
+ test.check("name", "{myDataSetMapping}", case_sensitive=False),
+ ])
+ step_data_set_mapping_show(test, rg, checks=[
+ test.check("name", "{myDataSetMapping}", case_sensitive=False),
+ ])
+ step_data_set_mapping_list(test, rg, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_data_set_mapping_delete(test, rg, checks=[])
+ cleanup_scenario(test, rg)
+
+
+# Test class for Scenario
+@try_manual
+class DatashareScenarioTest(ScenarioTest):
+
+ def __init__(self, *args, **kwargs):
+ super(DatashareScenarioTest, self).__init__(*args, **kwargs)
+ self.kwargs.update({
+ 'subscription_id': self.get_subscription_id()
+ })
+
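+        # These keys back the '{...}' placeholders used by the example step
+        # commands; ScenarioTest.cmd resolves them against self.kwargs.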
+ self.kwargs.update({
+ 'myAccount': 'Account1',
+ 'myAccount2': 'sourceAccount',
+ 'myAccount3': 'consumerAccount',
+ 'myShare': 'Share1',
+ 'myShare2': 'share1',
+ 'myShareSubscription': 'ShareSubscription1',
+ 'myShareSubscription2': 'ShareSub1',
+ 'myShareSubscription3': 'Share1',
+ 'mySynchronizationSetting': 'SynchronizationSetting1',
+ 'mySynchronizationSetting2': 'Dataset1',
+ 'myTrigger': 'Trigger1',
+ 'myDataSet': 'Dataset1',
+ 'myDataSet2': 'dataset1',
+ 'myDataSetMapping': 'DatasetMapping1',
+ 'myDataSetMapping2': 'datasetMappingName1',
+ 'myInvitation': 'Invitation1',
+ })
+
+
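+    # The [:7] slice shortens the prefix to 'clitest'; ResourceGroupPreparer
+    # appends a random suffix to build the actual resource group name.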
+ @ResourceGroupPreparer(name_prefix='clitestdatashare_SampleResourceGroup'[:7], key='rg', parameter_name='rg')
+ def test_datashare_Scenario(self, rg):
+ call_scenario(self, rg)
+ calc_coverage(__file__)
+ raise_if()
+
diff --git a/src/datashare/azext_datashare/vendored_sdks/__init__.py b/src/datashare/azext_datashare/vendored_sdks/__init__.py
index 8d86d5a6be1..c9cfdc73e77 100644
--- a/src/datashare/azext_datashare/vendored_sdks/__init__.py
+++ b/src/datashare/azext_datashare/vendored_sdks/__init__.py
@@ -1,12 +1,12 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
-#
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is
-# regenerated.
-# --------------------------------------------------------------------------
-
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)
\ No newline at end of file
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/__init__.py b/src/datashare/azext_datashare/vendored_sdks/datashare/__init__.py
index eb57d0ef34e..fef7dc178db 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/__init__.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/__init__.py
@@ -8,3 +8,9 @@
from ._data_share_management_client import DataShareManagementClient
__all__ = ['DataShareManagementClient']
+
+try:
+ from ._patch import patch_sdk # type: ignore
+ patch_sdk()
+except ImportError:
+ pass
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/_configuration.py b/src/datashare/azext_datashare/vendored_sdks/datashare/_configuration.py
index 88932a1125e..a75f5b819ce 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/_configuration.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/_configuration.py
@@ -6,10 +6,17 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any
+from typing import TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+ from azure.core.credentials import TokenCredential
VERSION = "unknown"
@@ -20,7 +27,7 @@ class DataShareManagementClientConfiguration(Configuration):
attributes.
:param credential: Credential needed for the client to connect to Azure.
- :type credential: azure.core.credentials.TokenCredential
+ :type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The subscription identifier.
:type subscription_id: str
"""
@@ -40,7 +47,8 @@ def __init__(
self.credential = credential
self.subscription_id = subscription_id
- self.api_version = "2019-11-01"
+ self.api_version = "2020-09-01"
+ self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'datasharemanagementclient/{}'.format(VERSION))
self._configure(**kwargs)
@@ -53,9 +61,10 @@ def _configure(
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
- self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, **kwargs)
+ self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/_data_share_management_client.py b/src/datashare/azext_datashare/vendored_sdks/datashare/_data_share_management_client.py
index c36cabec848..b28886fee55 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/_data_share_management_client.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/_data_share_management_client.py
@@ -6,59 +6,66 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Optional
+from typing import TYPE_CHECKING
-from azure.core import PipelineClient
+from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Optional
+
+ from azure.core.credentials import TokenCredential
+
from ._configuration import DataShareManagementClientConfiguration
-from .operations import AccountOperations
-from .operations import ConsumerInvitationOperations
-from .operations import DataSetOperations
-from .operations import DataSetMappingOperations
-from .operations import InvitationOperations
-from .operations import OperationOperations
-from .operations import ShareOperations
-from .operations import ProviderShareSubscriptionOperations
-from .operations import ShareSubscriptionOperations
-from .operations import ConsumerSourceDataSetOperations
-from .operations import SynchronizationSettingOperations
-from .operations import TriggerOperations
+from .operations import AccountsOperations
+from .operations import ConsumerInvitationsOperations
+from .operations import DataSetsOperations
+from .operations import DataSetMappingsOperations
+from .operations import InvitationsOperations
+from .operations import Operations
+from .operations import SharesOperations
+from .operations import ProviderShareSubscriptionsOperations
+from .operations import ShareSubscriptionsOperations
+from .operations import ConsumerSourceDataSetsOperations
+from .operations import SynchronizationSettingsOperations
+from .operations import TriggersOperations
from . import models
class DataShareManagementClient(object):
"""Creates a Microsoft.DataShare management client.
- :ivar account: AccountOperations operations
- :vartype account: data_share_management_client.operations.AccountOperations
- :ivar consumer_invitation: ConsumerInvitationOperations operations
- :vartype consumer_invitation: data_share_management_client.operations.ConsumerInvitationOperations
- :ivar data_set: DataSetOperations operations
- :vartype data_set: data_share_management_client.operations.DataSetOperations
- :ivar data_set_mapping: DataSetMappingOperations operations
- :vartype data_set_mapping: data_share_management_client.operations.DataSetMappingOperations
- :ivar invitation: InvitationOperations operations
- :vartype invitation: data_share_management_client.operations.InvitationOperations
- :ivar operation: OperationOperations operations
- :vartype operation: data_share_management_client.operations.OperationOperations
- :ivar share: ShareOperations operations
- :vartype share: data_share_management_client.operations.ShareOperations
- :ivar provider_share_subscription: ProviderShareSubscriptionOperations operations
- :vartype provider_share_subscription: data_share_management_client.operations.ProviderShareSubscriptionOperations
- :ivar share_subscription: ShareSubscriptionOperations operations
- :vartype share_subscription: data_share_management_client.operations.ShareSubscriptionOperations
- :ivar consumer_source_data_set: ConsumerSourceDataSetOperations operations
- :vartype consumer_source_data_set: data_share_management_client.operations.ConsumerSourceDataSetOperations
- :ivar synchronization_setting: SynchronizationSettingOperations operations
- :vartype synchronization_setting: data_share_management_client.operations.SynchronizationSettingOperations
- :ivar trigger: TriggerOperations operations
- :vartype trigger: data_share_management_client.operations.TriggerOperations
+ :ivar accounts: AccountsOperations operations
+ :vartype accounts: data_share_management_client.operations.AccountsOperations
+ :ivar consumer_invitations: ConsumerInvitationsOperations operations
+ :vartype consumer_invitations: data_share_management_client.operations.ConsumerInvitationsOperations
+ :ivar data_sets: DataSetsOperations operations
+ :vartype data_sets: data_share_management_client.operations.DataSetsOperations
+ :ivar data_set_mappings: DataSetMappingsOperations operations
+ :vartype data_set_mappings: data_share_management_client.operations.DataSetMappingsOperations
+ :ivar invitations: InvitationsOperations operations
+ :vartype invitations: data_share_management_client.operations.InvitationsOperations
+ :ivar operations: Operations operations
+ :vartype operations: data_share_management_client.operations.Operations
+ :ivar shares: SharesOperations operations
+ :vartype shares: data_share_management_client.operations.SharesOperations
+ :ivar provider_share_subscriptions: ProviderShareSubscriptionsOperations operations
+ :vartype provider_share_subscriptions: data_share_management_client.operations.ProviderShareSubscriptionsOperations
+ :ivar share_subscriptions: ShareSubscriptionsOperations operations
+ :vartype share_subscriptions: data_share_management_client.operations.ShareSubscriptionsOperations
+ :ivar consumer_source_data_sets: ConsumerSourceDataSetsOperations operations
+ :vartype consumer_source_data_sets: data_share_management_client.operations.ConsumerSourceDataSetsOperations
+ :ivar synchronization_settings: SynchronizationSettingsOperations operations
+ :vartype synchronization_settings: data_share_management_client.operations.SynchronizationSettingsOperations
+ :ivar triggers: TriggersOperations operations
+ :vartype triggers: data_share_management_client.operations.TriggersOperations
:param credential: Credential needed for the client to connect to Azure.
- :type credential: azure.core.credentials.TokenCredential
+ :type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The subscription identifier.
:type subscription_id: str
:param str base_url: Service URL
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""
def __init__(
@@ -72,35 +79,35 @@ def __init__(
if not base_url:
base_url = 'https://management.azure.com'
self._config = DataShareManagementClientConfiguration(credential, subscription_id, **kwargs)
- self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs)
+ self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
- self.account = AccountOperations(
+ self.accounts = AccountsOperations(
self._client, self._config, self._serialize, self._deserialize)
- self.consumer_invitation = ConsumerInvitationOperations(
+ self.consumer_invitations = ConsumerInvitationsOperations(
self._client, self._config, self._serialize, self._deserialize)
- self.data_set = DataSetOperations(
+ self.data_sets = DataSetsOperations(
self._client, self._config, self._serialize, self._deserialize)
- self.data_set_mapping = DataSetMappingOperations(
+ self.data_set_mappings = DataSetMappingsOperations(
self._client, self._config, self._serialize, self._deserialize)
- self.invitation = InvitationOperations(
+ self.invitations = InvitationsOperations(
self._client, self._config, self._serialize, self._deserialize)
- self.operation = OperationOperations(
+ self.operations = Operations(
self._client, self._config, self._serialize, self._deserialize)
- self.share = ShareOperations(
+ self.shares = SharesOperations(
self._client, self._config, self._serialize, self._deserialize)
- self.provider_share_subscription = ProviderShareSubscriptionOperations(
+ self.provider_share_subscriptions = ProviderShareSubscriptionsOperations(
self._client, self._config, self._serialize, self._deserialize)
- self.share_subscription = ShareSubscriptionOperations(
+ self.share_subscriptions = ShareSubscriptionsOperations(
self._client, self._config, self._serialize, self._deserialize)
- self.consumer_source_data_set = ConsumerSourceDataSetOperations(
+ self.consumer_source_data_sets = ConsumerSourceDataSetsOperations(
self._client, self._config, self._serialize, self._deserialize)
- self.synchronization_setting = SynchronizationSettingOperations(
+ self.synchronization_settings = SynchronizationSettingsOperations(
self._client, self._config, self._serialize, self._deserialize)
- self.trigger = TriggerOperations(
+ self.triggers = TriggersOperations(
self._client, self._config, self._serialize, self._deserialize)
def close(self):
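
A hedged sketch (not part of the diff) of how the regenerated synchronous client might be constructed now that it wraps ARMPipelineClient and exposes pluralized operation groups; the credential type, subscription id, and printed attribute are illustrative assumptions::

    from azure.identity import DefaultAzureCredential
    from azext_datashare.vendored_sdks.datashare import DataShareManagementClient

    # The client now builds an ARMPipelineClient internally (see the diff above).
    client = DataShareManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",
    )
    # 'accounts' (plural) replaces the former 'account' operation group.
    for account in client.accounts.list_by_subscription():
        print(account.name)
    client.close()
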
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/__init__.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/__init__.py
index e14f4db6b79..eb57d0ef34e 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/__init__.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/__init__.py
@@ -6,5 +6,5 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from ._data_share_management_client_async import DataShareManagementClient
+from ._data_share_management_client import DataShareManagementClient
__all__ = ['DataShareManagementClient']
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/_configuration_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/_configuration.py
similarity index 79%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/_configuration_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/_configuration.py
index a5c30f8ec3f..b9bced05c08 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/_configuration_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/_configuration.py
@@ -6,10 +6,15 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any
+from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
VERSION = "unknown"
@@ -20,14 +25,14 @@ class DataShareManagementClientConfiguration(Configuration):
attributes.
:param credential: Credential needed for the client to connect to Azure.
- :type credential: azure.core.credentials.TokenCredential
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The subscription identifier.
:type subscription_id: str
"""
def __init__(
self,
- credential: "TokenCredential",
+ credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
@@ -39,7 +44,8 @@ def __init__(
self.credential = credential
self.subscription_id = subscription_id
- self.api_version = "2019-11-01"
+ self.api_version = "2020-09-01"
+ self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'datasharemanagementclient/{}'.format(VERSION))
self._configure(**kwargs)
@@ -51,9 +57,10 @@ def _configure(
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
- self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, **kwargs)
+ self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
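
A hedged sketch of overriding the new 'credential_scopes' setting at construction time, for example against a non-public cloud; the endpoint and scope values are illustrative assumptions::

    from azure.identity.aio import DefaultAzureCredential
    from azext_datashare.vendored_sdks.datashare.aio import DataShareManagementClient

    # Client kwargs flow into DataShareManagementClientConfiguration, which pops
    # 'credential_scopes' and hands it to AsyncBearerTokenCredentialPolicy.
    client = DataShareManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",
        base_url="https://management.usgovcloudapi.net",
        credential_scopes=["https://management.usgovcloudapi.net/.default"],
    )
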
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/_data_share_management_client.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/_data_share_management_client.py
new file mode 100644
index 00000000000..fd1b48b990d
--- /dev/null
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/_data_share_management_client.py
@@ -0,0 +1,118 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, Optional, TYPE_CHECKING
+
+from azure.mgmt.core import AsyncARMPipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+from ._configuration import DataShareManagementClientConfiguration
+from .operations import AccountsOperations
+from .operations import ConsumerInvitationsOperations
+from .operations import DataSetsOperations
+from .operations import DataSetMappingsOperations
+from .operations import InvitationsOperations
+from .operations import Operations
+from .operations import SharesOperations
+from .operations import ProviderShareSubscriptionsOperations
+from .operations import ShareSubscriptionsOperations
+from .operations import ConsumerSourceDataSetsOperations
+from .operations import SynchronizationSettingsOperations
+from .operations import TriggersOperations
+from .. import models
+
+
+class DataShareManagementClient(object):
+ """Creates a Microsoft.DataShare management client.
+
+ :ivar accounts: AccountsOperations operations
+ :vartype accounts: data_share_management_client.aio.operations.AccountsOperations
+ :ivar consumer_invitations: ConsumerInvitationsOperations operations
+ :vartype consumer_invitations: data_share_management_client.aio.operations.ConsumerInvitationsOperations
+ :ivar data_sets: DataSetsOperations operations
+ :vartype data_sets: data_share_management_client.aio.operations.DataSetsOperations
+ :ivar data_set_mappings: DataSetMappingsOperations operations
+ :vartype data_set_mappings: data_share_management_client.aio.operations.DataSetMappingsOperations
+ :ivar invitations: InvitationsOperations operations
+ :vartype invitations: data_share_management_client.aio.operations.InvitationsOperations
+ :ivar operations: Operations operations
+ :vartype operations: data_share_management_client.aio.operations.Operations
+ :ivar shares: SharesOperations operations
+ :vartype shares: data_share_management_client.aio.operations.SharesOperations
+ :ivar provider_share_subscriptions: ProviderShareSubscriptionsOperations operations
+ :vartype provider_share_subscriptions: data_share_management_client.aio.operations.ProviderShareSubscriptionsOperations
+ :ivar share_subscriptions: ShareSubscriptionsOperations operations
+ :vartype share_subscriptions: data_share_management_client.aio.operations.ShareSubscriptionsOperations
+ :ivar consumer_source_data_sets: ConsumerSourceDataSetsOperations operations
+ :vartype consumer_source_data_sets: data_share_management_client.aio.operations.ConsumerSourceDataSetsOperations
+ :ivar synchronization_settings: SynchronizationSettingsOperations operations
+ :vartype synchronization_settings: data_share_management_client.aio.operations.SynchronizationSettingsOperations
+ :ivar triggers: TriggersOperations operations
+ :vartype triggers: data_share_management_client.aio.operations.TriggersOperations
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+ :param subscription_id: The subscription identifier.
+ :type subscription_id: str
+ :param str base_url: Service URL
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ """
+
+ def __init__(
+ self,
+ credential: "AsyncTokenCredential",
+ subscription_id: str,
+ base_url: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ if not base_url:
+ base_url = 'https://management.azure.com'
+ self._config = DataShareManagementClientConfiguration(credential, subscription_id, **kwargs)
+ self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+
+ self.accounts = AccountsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.consumer_invitations = ConsumerInvitationsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.data_sets = DataSetsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.data_set_mappings = DataSetMappingsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.invitations = InvitationsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.operations = Operations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.shares = SharesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.provider_share_subscriptions = ProviderShareSubscriptionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.share_subscriptions = ShareSubscriptionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.consumer_source_data_sets = ConsumerSourceDataSetsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.synchronization_settings = SynchronizationSettingsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.triggers = TriggersOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+
+ async def close(self) -> None:
+ await self._client.close()
+
+ async def __aenter__(self) -> "DataShareManagementClient":
+ await self._client.__aenter__()
+ return self
+
+ async def __aexit__(self, *exc_details) -> None:
+ await self._client.__aexit__(*exc_details)
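
A hedged sketch of the new async client used as an async context manager (its __aenter__/__aexit__ delegate to the AsyncARMPipelineClient); the resource group and account names are illustrative assumptions::

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azext_datashare.vendored_sdks.datashare.aio import DataShareManagementClient

    async def show_account() -> None:
        async with DataShareManagementClient(
            credential=DefaultAzureCredential(),
            subscription_id="00000000-0000-0000-0000-000000000000",
        ) as client:
            # Operation groups are now plural: client.accounts, client.shares, ...
            account = await client.accounts.get(
                resource_group_name="my-rg",
                account_name="my-account",
            )
            print(account.name)

    asyncio.run(show_account())
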
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/_data_share_management_client_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/_data_share_management_client_async.py
deleted file mode 100644
index 0609b8e2594..00000000000
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/_data_share_management_client_async.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from typing import Any, Optional
-
-from azure.core import AsyncPipelineClient
-from msrest import Deserializer, Serializer
-
-from ._configuration_async import DataShareManagementClientConfiguration
-from .operations_async import AccountOperations
-from .operations_async import ConsumerInvitationOperations
-from .operations_async import DataSetOperations
-from .operations_async import DataSetMappingOperations
-from .operations_async import InvitationOperations
-from .operations_async import OperationOperations
-from .operations_async import ShareOperations
-from .operations_async import ProviderShareSubscriptionOperations
-from .operations_async import ShareSubscriptionOperations
-from .operations_async import ConsumerSourceDataSetOperations
-from .operations_async import SynchronizationSettingOperations
-from .operations_async import TriggerOperations
-from .. import models
-
-
-class DataShareManagementClient(object):
- """Creates a Microsoft.DataShare management client.
-
- :ivar account: AccountOperations operations
- :vartype account: data_share_management_client.aio.operations_async.AccountOperations
- :ivar consumer_invitation: ConsumerInvitationOperations operations
- :vartype consumer_invitation: data_share_management_client.aio.operations_async.ConsumerInvitationOperations
- :ivar data_set: DataSetOperations operations
- :vartype data_set: data_share_management_client.aio.operations_async.DataSetOperations
- :ivar data_set_mapping: DataSetMappingOperations operations
- :vartype data_set_mapping: data_share_management_client.aio.operations_async.DataSetMappingOperations
- :ivar invitation: InvitationOperations operations
- :vartype invitation: data_share_management_client.aio.operations_async.InvitationOperations
- :ivar operation: OperationOperations operations
- :vartype operation: data_share_management_client.aio.operations_async.OperationOperations
- :ivar share: ShareOperations operations
- :vartype share: data_share_management_client.aio.operations_async.ShareOperations
- :ivar provider_share_subscription: ProviderShareSubscriptionOperations operations
- :vartype provider_share_subscription: data_share_management_client.aio.operations_async.ProviderShareSubscriptionOperations
- :ivar share_subscription: ShareSubscriptionOperations operations
- :vartype share_subscription: data_share_management_client.aio.operations_async.ShareSubscriptionOperations
- :ivar consumer_source_data_set: ConsumerSourceDataSetOperations operations
- :vartype consumer_source_data_set: data_share_management_client.aio.operations_async.ConsumerSourceDataSetOperations
- :ivar synchronization_setting: SynchronizationSettingOperations operations
- :vartype synchronization_setting: data_share_management_client.aio.operations_async.SynchronizationSettingOperations
- :ivar trigger: TriggerOperations operations
- :vartype trigger: data_share_management_client.aio.operations_async.TriggerOperations
- :param credential: Credential needed for the client to connect to Azure.
- :type credential: azure.core.credentials.TokenCredential
- :param subscription_id: The subscription identifier.
- :type subscription_id: str
- :param str base_url: Service URL
- """
-
- def __init__(
- self,
- credential: "TokenCredential",
- subscription_id: str,
- base_url: Optional[str] = None,
- **kwargs: Any
- ) -> None:
- if not base_url:
- base_url = 'https://management.azure.com'
- self._config = DataShareManagementClientConfiguration(credential, subscription_id, **kwargs)
- self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs)
-
- client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
- self._serialize = Serializer(client_models)
- self._deserialize = Deserializer(client_models)
-
- self.account = AccountOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.consumer_invitation = ConsumerInvitationOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.data_set = DataSetOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.data_set_mapping = DataSetMappingOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.invitation = InvitationOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.operation = OperationOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.share = ShareOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.provider_share_subscription = ProviderShareSubscriptionOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.share_subscription = ShareSubscriptionOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.consumer_source_data_set = ConsumerSourceDataSetOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.synchronization_setting = SynchronizationSettingOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.trigger = TriggerOperations(
- self._client, self._config, self._serialize, self._deserialize)
-
- async def close(self) -> None:
- await self._client.close()
-
- async def __aenter__(self) -> "DataShareManagementClient":
- await self._client.__aenter__()
- return self
-
- async def __aexit__(self, *exc_details) -> None:
- await self._client.__aexit__(*exc_details)
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/__init__.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/__init__.py
new file mode 100644
index 00000000000..7cd147304b9
--- /dev/null
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/__init__.py
@@ -0,0 +1,35 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._accounts_operations import AccountsOperations
+from ._consumer_invitations_operations import ConsumerInvitationsOperations
+from ._data_sets_operations import DataSetsOperations
+from ._data_set_mappings_operations import DataSetMappingsOperations
+from ._invitations_operations import InvitationsOperations
+from ._operations import Operations
+from ._shares_operations import SharesOperations
+from ._provider_share_subscriptions_operations import ProviderShareSubscriptionsOperations
+from ._share_subscriptions_operations import ShareSubscriptionsOperations
+from ._consumer_source_data_sets_operations import ConsumerSourceDataSetsOperations
+from ._synchronization_settings_operations import SynchronizationSettingsOperations
+from ._triggers_operations import TriggersOperations
+
+__all__ = [
+ 'AccountsOperations',
+ 'ConsumerInvitationsOperations',
+ 'DataSetsOperations',
+ 'DataSetMappingsOperations',
+ 'InvitationsOperations',
+ 'Operations',
+ 'SharesOperations',
+ 'ProviderShareSubscriptionsOperations',
+ 'ShareSubscriptionsOperations',
+ 'ConsumerSourceDataSetsOperations',
+ 'SynchronizationSettingsOperations',
+ 'TriggersOperations',
+]
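
A hedged sketch showing that the async operation groups now import from aio.operations rather than the removed operations_async package; the helper below is purely illustrative and not part of the generated code::

    from azext_datashare.vendored_sdks.datashare.aio.operations import AccountsOperations

    def group_name(group: AccountsOperations) -> str:
        # Handy for type hints or debugging output.
        return type(group).__name__
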
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_account_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_accounts_operations.py
similarity index 60%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_account_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_accounts_operations.py
index 5c293630594..f2b01198eb1 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_account_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_accounts_operations.py
@@ -5,22 +5,24 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class AccountOperations:
- """AccountOperations async operations.
+class AccountsOperations:
+ """AccountsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -41,31 +43,109 @@ def __init__(self, client, config, serializer, deserializer) -> None:
self._deserialize = deserializer
self._config = config
+ def list_by_subscription(
+ self,
+ skip_token: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.AccountList"]:
+ """List Accounts in a subscription.
+
+ List Accounts in Subscription.
+
+ :param skip_token: Continuation token.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either AccountList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.AccountList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.AccountList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_subscription.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('AccountList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataShare/accounts'} # type: ignore
+
async def get(
self,
resource_group_name: str,
account_name: str,
**kwargs
) -> "models.Account":
- """Get an account.
+ """Get an account under a resource group.
- Get an account under a resource group.
+ Get an account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Account or the result of cls(response)
+ :return: Account, or the result of cls(response)
:rtype: ~data_share_management_client.models.Account
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Account"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -79,9 +159,8 @@ async def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -89,34 +168,34 @@ async def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Account', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
async def _create_initial(
self,
resource_group_name: str,
account_name: str,
- identity: "models.Identity",
- location: Optional[str] = None,
- tags: Optional[Dict[str, str]] = None,
+ account: "models.Account",
**kwargs
) -> "models.Account":
cls = kwargs.pop('cls', None) # type: ClsType["models.Account"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _account = models.Account(location=location, tags=tags, identity=identity)
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self._create_initial.metadata['url']
+ url = self._create_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -131,23 +210,20 @@ async def _create_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_account, 'Account')
+ body_content = self._serialize.body(account, 'Account')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Account', pipeline_response)
@@ -155,54 +231,56 @@ async def _create_initial(
deserialized = self._deserialize('Account', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
+ _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
- async def create(
+ async def begin_create(
self,
resource_group_name: str,
account_name: str,
- identity: "models.Identity",
- location: Optional[str] = None,
- tags: Optional[Dict[str, str]] = None,
+ account: "models.Account",
**kwargs
- ) -> "models.Account":
- """Create an account.
+ ) -> AsyncLROPoller["models.Account"]:
+ """Create an account in the given resource group.
- Create an account in the given resource group.
+ Create an account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
:type account_name: str
- :param identity: Identity Info on the Account.
- :type identity: ~data_share_management_client.models.Identity
- :param location: Location of the azure resource.
- :type location: str
- :param tags: Tags on the azure resource.
- :type tags: dict[str, str]
+ :param account: The account payload.
+ :type account: ~data_share_management_client.models.Account
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :return: An instance of LROPoller that returns Account
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.Account]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either Account or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~data_share_management_client.models.Account]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.Account"]
- raw_result = await self._create_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- identity=identity,
- location=location,
- tags=tags,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ account=account,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('Account', pipeline_response)
@@ -211,28 +289,42 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
- return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
account_name: str,
**kwargs
- ) -> "models.OperationResponse":
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ ) -> Optional["models.OperationResponse"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OperationResponse"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._delete_initial.metadata['url']
+ url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -246,9 +338,8 @@ async def _delete_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -256,49 +347,59 @@ async def _delete_initial(
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
- async def delete(
+ async def begin_delete(
self,
resource_group_name: str,
account_name: str,
**kwargs
- ) -> "models.OperationResponse":
- """DeleteAccount.
+ ) -> AsyncLROPoller["models.OperationResponse"]:
+ """Delete an account.
- Delete an account.
+ DeleteAccount.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :return: An instance of LROPoller that returns OperationResponse
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.OperationResponse]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either OperationResponse or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~data_share_management_client.models.OperationResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- raw_result = await self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('OperationResponse', pipeline_response)
@@ -307,47 +408,59 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
- return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
async def update(
self,
resource_group_name: str,
account_name: str,
- tags: Optional[Dict[str, str]] = None,
+ account_update_parameters: "models.AccountUpdateParameters",
**kwargs
) -> "models.Account":
- """Patch an account.
+ """Patch a given account.
- Patch a given account.
+ Patch an account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
:type account_name: str
- :param tags: Tags on the azure resource.
- :type tags: dict[str, str]
+ :param account_update_parameters: The account update parameters.
+ :type account_update_parameters: ~data_share_management_client.models.AccountUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Account or the result of cls(response)
+ :return: Account, or the result of cls(response)
:rtype: ~data_share_management_client.models.Account
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Account"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _account_update_parameters = models.AccountUpdateParameters(tags=tags)
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.update.metadata['url']
+ url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -362,147 +475,79 @@ async def update(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_account_update_parameters, 'AccountUpdateParameters')
+ body_content = self._serialize.body(account_update_parameters, 'AccountUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Account', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
-
- def list_by_subscription(
- self,
- skip_token: Optional[str] = None,
- **kwargs
- ) -> "models.AccountList":
- """List Accounts in Subscription.
-
- List Accounts in a subscription.
-
- :param skip_token: Continuation token.
- :type skip_token: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: AccountList or the result of cls(response)
- :rtype: ~data_share_management_client.models.AccountList
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.AccountList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_by_subscription.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('AccountList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataShare/accounts'}
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
def list_by_resource_group(
self,
resource_group_name: str,
skip_token: Optional[str] = None,
**kwargs
- ) -> "models.AccountList":
- """List Accounts in ResourceGroup.
+ ) -> AsyncIterable["models.AccountList"]:
+ """List Accounts in a resource group.
- List Accounts in a resource group.
+ List Accounts in ResourceGroup.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param skip_token: Continuation token.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: AccountList or the result of cls(response)
- :rtype: ~data_share_management_client.models.AccountList
+ :return: An iterator like instance of either AccountList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.AccountList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.AccountList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_resource_group.metadata['url']
+ url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
@@ -521,11 +566,11 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts'}
+ list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts'} # type: ignore
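
A hedged sketch combining the async paging result with the renamed LRO method on the accounts group; all resource names are illustrative assumptions::

    from azext_datashare.vendored_sdks.datashare.aio import DataShareManagementClient

    async def remove_all_accounts(client: DataShareManagementClient, resource_group_name: str) -> None:
        # list_by_resource_group returns an AsyncItemPaged; begin_delete an AsyncLROPoller.
        async for account in client.accounts.list_by_resource_group(resource_group_name):
            poller = await client.accounts.begin_delete(
                resource_group_name=resource_group_name,
                account_name=account.name,
            )
            await poller.result()  # wait for the long-running delete to finish
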
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_consumer_invitation_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_consumer_invitations_operations.py
similarity index 66%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_consumer_invitation_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_consumer_invitations_operations.py
index 97d9f26ea32..f6667530950 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_consumer_invitation_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_consumer_invitations_operations.py
@@ -5,21 +5,22 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class ConsumerInvitationOperations:
- """ConsumerInvitationOperations async operations.
+class ConsumerInvitationsOperations:
+ """ConsumerInvitationsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -40,36 +41,108 @@ def __init__(self, client, config, serializer, deserializer) -> None:
self._deserialize = deserializer
self._config = config
- async def reject_invitation(
+ def list_invitations(
+ self,
+ skip_token: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.ConsumerInvitationList"]:
+ """List the invitations.
+
+ Lists invitations.
+
+ :param skip_token: The continuation token.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ConsumerInvitationList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.ConsumerInvitationList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ConsumerInvitationList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_invitations.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ConsumerInvitationList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_invitations.metadata = {'url': '/providers/Microsoft.DataShare/listInvitations'} # type: ignore
+
+ async def get(
self,
location: str,
invitation_id: str,
**kwargs
) -> "models.ConsumerInvitation":
- """Reject an invitation.
+ """Gets the invitation identified by invitationId.
- Rejects the invitation identified by invitationId.
+ Get an invitation.
:param location: Location of the invitation.
:type location: str
- :param invitation_id: Unique id of the invitation.
+ :param invitation_id: An invitation id.
:type invitation_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ConsumerInvitation or the result of cls(response)
+ :return: ConsumerInvitation, or the result of cls(response)
:rtype: ~data_share_management_client.models.ConsumerInvitation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ConsumerInvitation"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _invitation = models.ConsumerInvitation(invitation_id=invitation_id)
- api_version = "2019-11-01"
- content_type = kwargs.pop("content_type", "application/json")
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.reject_invitation.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
+ 'invitationId': self._serialize.url("invitation_id", invitation_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
@@ -79,59 +152,57 @@ async def reject_invitation(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_invitation, 'ConsumerInvitation')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+ request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ConsumerInvitation', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- reject_invitation.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/RejectInvitation'}
+ get.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/consumerInvitations/{invitationId}'} # type: ignore
- async def get(
+ async def reject_invitation(
self,
location: str,
- invitation_id: str,
+ invitation: "models.ConsumerInvitation",
**kwargs
) -> "models.ConsumerInvitation":
- """Get an invitation.
+ """Rejects the invitation identified by invitationId.
- Gets the invitation identified by invitationId.
+ Reject an invitation.
:param location: Location of the invitation.
:type location: str
- :param invitation_id: An invitation id.
- :type invitation_id: str
+ :param invitation: An invitation payload.
+ :type invitation: ~data_share_management_client.models.ConsumerInvitation
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ConsumerInvitation or the result of cls(response)
+ :return: ConsumerInvitation, or the result of cls(response)
:rtype: ~data_share_management_client.models.ConsumerInvitation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ConsumerInvitation"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.reject_invitation.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
- 'invitationId': self._serialize.url("invitation_id", invitation_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
@@ -141,88 +212,25 @@ async def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(invitation, 'ConsumerInvitation')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ConsumerInvitation', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/consumerInvitations/{invitationId}'}
-
- def list_invitation(
- self,
- skip_token: Optional[str] = None,
- **kwargs
- ) -> "models.ConsumerInvitationList":
- """Lists invitations.
-
- List the invitations.
-
- :param skip_token: The continuation token.
- :type skip_token: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ConsumerInvitationList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ConsumerInvitationList
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ConsumerInvitationList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_invitation.metadata['url']
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('ConsumerInvitationList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_invitation.metadata = {'url': '/providers/Microsoft.DataShare/ListInvitations'}
+ reject_invitation.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/rejectInvitation'} # type: ignore
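Note the shift in reject_invitation: the regenerated operation takes a ConsumerInvitation payload rather than a bare invitation_id, so callers now build the model that the old method constructed internally. A hedged sketch under that assumption (the `consumer_invitations` attribute name and import path are assumed from the vendored layout, not confirmed by this diff):

    # Sketch only; the operations-group attribute name is an assumption.
    from azext_datashare.vendored_sdks.datashare import models

    async def reject(client, location: str, invitation_id: str):
        # Build the payload the regenerated operation now expects from the caller.
        invitation = models.ConsumerInvitation(invitation_id=invitation_id)
        return await client.consumer_invitations.reject_invitation(
            location=location,
            invitation=invitation,
        )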
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_consumer_source_data_set_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_consumer_source_data_sets_operations.py
similarity index 72%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_consumer_source_data_set_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_consumer_source_data_sets_operations.py
index 65443ffd30b..dfcc82eb843 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_consumer_source_data_set_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_consumer_source_data_sets_operations.py
@@ -5,21 +5,22 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class ConsumerSourceDataSetOperations:
- """ConsumerSourceDataSetOperations async operations.
+class ConsumerSourceDataSetsOperations:
+ """ConsumerSourceDataSetsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -47,7 +48,7 @@ def list_by_share_subscription(
share_subscription_name: str,
skip_token: Optional[str] = None,
**kwargs
- ) -> "models.ConsumerSourceDataSetList":
+ ) -> AsyncIterable["models.ConsumerSourceDataSetList"]:
"""Get source dataSets of a shareSubscription.
Get source dataSets of a shareSubscription.
@@ -61,18 +62,26 @@ def list_by_share_subscription(
:param skip_token: Continuation token.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ConsumerSourceDataSetList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ConsumerSourceDataSetList
+ :return: An iterator like instance of either ConsumerSourceDataSetList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.ConsumerSourceDataSetList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ConsumerSourceDataSetList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share_subscription.metadata['url']
+ url = self.list_by_share_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -80,21 +89,17 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
@@ -113,11 +118,11 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/ConsumerSourceDataSets'}
+ list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/consumerSourceDataSets'} # type: ignore
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_data_set_mapping_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_data_set_mappings_operations.py
similarity index 75%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_data_set_mapping_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_data_set_mappings_operations.py
index acf0170149b..d0daa2fa90c 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_data_set_mapping_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_data_set_mappings_operations.py
@@ -5,21 +5,22 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class DataSetMappingOperations:
- """DataSetMappingOperations async operations.
+class DataSetMappingsOperations:
+ """DataSetMappingsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -48,9 +49,9 @@ async def get(
data_set_mapping_name: str,
**kwargs
) -> "models.DataSetMapping":
- """Get a DataSetMapping in a shareSubscription.
+ """Get DataSetMapping in a shareSubscription.
- Get DataSetMapping in a shareSubscription.
+ Get a DataSetMapping in a shareSubscription.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -61,16 +62,20 @@ async def get(
:param data_set_mapping_name: The name of the dataSetMapping.
:type data_set_mapping_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSetMapping or the result of cls(response)
+ :return: DataSetMapping, or the result of cls(response)
:rtype: ~data_share_management_client.models.DataSetMapping
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSetMapping"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -86,9 +91,8 @@ async def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -96,15 +100,15 @@ async def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('DataSetMapping', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'} # type: ignore
async def create(
self,
@@ -115,11 +119,11 @@ async def create(
data_set_mapping: "models.DataSetMapping",
**kwargs
) -> "models.DataSetMapping":
- """Create a DataSetMapping.
-
- Maps a source data set in the source share to a sink data set in the share subscription.
+ """Maps a source data set in the source share to a sink data set in the share subscription.
Enables copying the data set from source to destination.
+ Create a DataSetMapping.
+
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
@@ -132,17 +136,21 @@ async def create(
:param data_set_mapping: Destination data set configuration details.
:type data_set_mapping: ~data_share_management_client.models.DataSetMapping
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSetMapping or the result of cls(response)
- :rtype: ~data_share_management_client.models.DataSetMapping or ~data_share_management_client.models.DataSetMapping
+ :return: DataSetMapping, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.DataSetMapping
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSetMapping"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -159,23 +167,20 @@ async def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(data_set_mapping, 'DataSetMapping')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DataSetMapping', pipeline_response)
@@ -183,10 +188,10 @@ async def create(
deserialized = self._deserialize('DataSetMapping', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'}
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'} # type: ignore
async def delete(
self,
@@ -196,9 +201,9 @@ async def delete(
data_set_mapping_name: str,
**kwargs
) -> None:
- """Delete a DataSetMapping in a shareSubscription.
+ """Delete DataSetMapping in a shareSubscription.
- Delete DataSetMapping in a shareSubscription.
+ Delete a DataSetMapping in a shareSubscription.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -209,16 +214,20 @@ async def delete(
:param data_set_mapping_name: The name of the dataSetMapping.
:type data_set_mapping_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: None or the result of cls(response)
+ :return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.delete.metadata['url']
+ url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -234,8 +243,8 @@ async def delete(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -243,12 +252,12 @@ async def delete(
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {})
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'}
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'} # type: ignore
def list_by_share_subscription(
self,
@@ -256,8 +265,10 @@ def list_by_share_subscription(
account_name: str,
share_subscription_name: str,
skip_token: Optional[str] = None,
+ filter: Optional[str] = None,
+ orderby: Optional[str] = None,
**kwargs
- ) -> "models.DataSetMappingList":
+ ) -> AsyncIterable["models.DataSetMappingList"]:
"""List DataSetMappings in a share subscription.
List DataSetMappings in a share subscription.
@@ -270,19 +281,31 @@ def list_by_share_subscription(
:type share_subscription_name: str
:param skip_token: Continuation token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSetMappingList or the result of cls(response)
- :rtype: ~data_share_management_client.models.DataSetMappingList
+ :return: An iterator like instance of either DataSetMappingList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.DataSetMappingList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSetMappingList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share_subscription.metadata['url']
+ url = self.list_by_share_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -290,21 +313,21 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
@@ -323,11 +346,11 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings'}
+ list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings'} # type: ignore
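The regenerated list_by_share_subscription also accepts optional `filter` and `orderby` arguments that are serialized as the $filter/$orderby query parameters. A minimal sketch; the client construction, the `data_set_mappings` attribute, and the OData expression are assumptions for illustration:

    # Sketch only; attribute names and the filter expression are illustrative assumptions.
    async def list_mappings(client, resource_group_name, account_name, share_subscription_name):
        pager = client.data_set_mappings.list_by_share_subscription(
            resource_group_name,
            account_name,
            share_subscription_name,
            filter="name eq 'mapping1'",  # sent as $filter
            orderby="name",               # sent as $orderby
        )
        async for mapping in pager:
            print(mapping.name)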
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_data_set_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_data_sets_operations.py
similarity index 66%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_data_set_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_data_sets_operations.py
index 077f29e0407..1bbc4aedd31 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_data_set_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_data_sets_operations.py
@@ -5,22 +5,24 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class DataSetOperations:
- """DataSetOperations async operations.
+class DataSetsOperations:
+ """DataSetsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -49,9 +51,9 @@ async def get(
data_set_name: str,
**kwargs
) -> "models.DataSet":
- """Get a DataSet in a share.
+ """Get DataSet in a share.
- Get DataSet in a share.
+ Get a DataSet in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -62,16 +64,20 @@ async def get(
:param data_set_name: The name of the dataSet.
:type data_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSet or the result of cls(response)
+ :return: DataSet, or the result of cls(response)
:rtype: ~data_share_management_client.models.DataSet
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSet"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -87,9 +93,8 @@ async def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -97,15 +102,15 @@ async def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('DataSet', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'} # type: ignore
async def create(
self,
@@ -116,9 +121,9 @@ async def create(
data_set: "models.DataSet",
**kwargs
) -> "models.DataSet":
- """Create a DataSet.
+ """Adds a new data set to an existing share.
- Adds a new data set to an existing share.
+ Create a DataSet.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -131,17 +136,21 @@ async def create(
:param data_set: The new data set information.
:type data_set: ~data_share_management_client.models.DataSet
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSet or the result of cls(response)
- :rtype: ~data_share_management_client.models.DataSet or ~data_share_management_client.models.DataSet
+ :return: DataSet, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.DataSet
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSet"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -158,23 +167,20 @@ async def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(data_set, 'DataSet')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DataSet', pipeline_response)
@@ -182,10 +188,10 @@ async def create(
deserialized = self._deserialize('DataSet', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'}
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'} # type: ignore
async def _delete_initial(
self,
@@ -196,11 +202,15 @@ async def _delete_initial(
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._delete_initial.metadata['url']
+ url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -216,8 +226,8 @@ async def _delete_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -225,24 +235,24 @@ async def _delete_initial(
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {})
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'} # type: ignore
- async def delete(
+ async def begin_delete(
self,
resource_group_name: str,
account_name: str,
share_name: str,
data_set_name: str,
**kwargs
- ) -> None:
- """Delete a DataSet in a share.
+ ) -> AsyncLROPoller[None]:
+ """Delete DataSet in a share.
- Delete DataSet in a share.
+ Delete a DataSet in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -253,38 +263,60 @@ async def delete(
:param data_set_name: The name of the dataSet.
:type data_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :return: An instance of LROPoller that returns None
- :rtype: ~azure.core.polling.LROPoller[None]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
- raw_result = await self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_name=share_name,
- data_set_name=data_set_name,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ data_set_name=data_set_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ 'dataSetName': self._serialize.url("data_set_name", data_set_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
- return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'}
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'} # type: ignore
def list_by_share(
self,
@@ -292,8 +324,10 @@ def list_by_share(
account_name: str,
share_name: str,
skip_token: Optional[str] = None,
+ filter: Optional[str] = None,
+ orderby: Optional[str] = None,
**kwargs
- ) -> "models.DataSetList":
+ ) -> AsyncIterable["models.DataSetList"]:
"""List DataSets in a share.
List DataSets in a share.
@@ -306,19 +340,31 @@ def list_by_share(
:type share_name: str
:param skip_token: continuation token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSetList or the result of cls(response)
- :rtype: ~data_share_management_client.models.DataSetList
+ :return: An iterator like instance of either DataSetList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.DataSetList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSetList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share.metadata['url']
+ url = self.list_by_share.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -326,21 +372,21 @@ def prepare_request(next_link=None):
'shareName': self._serialize.url("share_name", share_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
@@ -359,11 +405,11 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets'}
+ list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets'} # type: ignore
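With this regeneration, delete becomes begin_delete and returns an AsyncLROPoller driven by AsyncARMPolling by default. A usage sketch, assuming a `data_sets` operations attribute on the client (the attribute name is not confirmed by this diff):

    # Sketch only; the `data_sets` attribute name is an assumption.
    async def delete_data_set(client, resource_group_name, account_name, share_name, data_set_name):
        poller = await client.data_sets.begin_delete(
            resource_group_name,
            account_name,
            share_name,
            data_set_name,
        )
        # Block until the long-running delete completes (or raises HttpResponseError).
        await poller.result()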
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_invitation_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_invitations_operations.py
similarity index 72%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_invitation_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_invitations_operations.py
index c53ea9c8e86..bdb63ea5254 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_invitation_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_invitations_operations.py
@@ -5,21 +5,22 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class InvitationOperations:
- """InvitationOperations async operations.
+class InvitationsOperations:
+ """InvitationsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -48,9 +49,9 @@ async def get(
invitation_name: str,
**kwargs
) -> "models.Invitation":
- """Get an invitation in a share.
+ """Get Invitation in a share.
- Get Invitation in a share.
+ Get an invitation in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -61,16 +62,20 @@ async def get(
:param invitation_name: The name of the invitation.
:type invitation_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Invitation or the result of cls(response)
+ :return: Invitation, or the result of cls(response)
:rtype: ~data_share_management_client.models.Invitation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Invitation"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -86,9 +91,8 @@ async def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -96,15 +100,15 @@ async def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Invitation', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'} # type: ignore
async def create(
self,
@@ -112,14 +116,12 @@ async def create(
account_name: str,
share_name: str,
invitation_name: str,
- target_active_directory_id: Optional[str] = None,
- target_email: Optional[str] = None,
- target_object_id: Optional[str] = None,
+ invitation: "models.Invitation",
**kwargs
) -> "models.Invitation":
- """Create an invitation.
+ """Sends a new invitation to a recipient to access a share.
- Sends a new invitation to a recipient to access a share.
+ Create an invitation.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -129,28 +131,24 @@ async def create(
:type share_name: str
:param invitation_name: The name of the invitation.
:type invitation_name: str
- :param target_active_directory_id: The target Azure AD Id. Can't be combined with email.
- :type target_active_directory_id: str
- :param target_email: The email the invitation is directed to.
- :type target_email: str
- :param target_object_id: The target user or application Id that invitation is being sent to.
- Must be specified along TargetActiveDirectoryId. This enables sending
- invitations to specific users or applications in an AD tenant.
- :type target_object_id: str
+ :param invitation: Invitation details.
+ :type invitation: ~data_share_management_client.models.Invitation
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Invitation or the result of cls(response)
- :rtype: ~data_share_management_client.models.Invitation or ~data_share_management_client.models.Invitation
+ :return: Invitation, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.Invitation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Invitation"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _invitation = models.Invitation(target_active_directory_id=target_active_directory_id, target_email=target_email, target_object_id=target_object_id)
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -167,23 +165,20 @@ async def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_invitation, 'Invitation')
+ body_content = self._serialize.body(invitation, 'Invitation')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Invitation', pipeline_response)
@@ -191,10 +186,10 @@ async def create(
deserialized = self._deserialize('Invitation', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'}
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'} # type: ignore
async def delete(
self,
@@ -204,9 +199,9 @@ async def delete(
invitation_name: str,
**kwargs
) -> None:
- """Delete an invitation in a share.
+ """Delete Invitation in a share.
- Delete Invitation in a share.
+ Delete an invitation in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -217,16 +212,20 @@ async def delete(
:param invitation_name: The name of the invitation.
:type invitation_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: None or the result of cls(response)
+ :return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.delete.metadata['url']
+ url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -242,8 +241,8 @@ async def delete(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -251,12 +250,12 @@ async def delete(
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {})
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'}
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'} # type: ignore
def list_by_share(
self,
@@ -264,11 +263,13 @@ def list_by_share(
account_name: str,
share_name: str,
skip_token: Optional[str] = None,
+ filter: Optional[str] = None,
+ orderby: Optional[str] = None,
**kwargs
- ) -> "models.InvitationList":
- """List invitations in a share.
+ ) -> AsyncIterable["models.InvitationList"]:
+ """List all Invitations in a share.
- List all Invitations in a share.
+ List invitations in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -278,19 +279,31 @@ def list_by_share(
:type share_name: str
:param skip_token: The continuation token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: InvitationList or the result of cls(response)
- :rtype: ~data_share_management_client.models.InvitationList
+ :return: An iterator like instance of either InvitationList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.InvitationList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.InvitationList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share.metadata['url']
+ url = self.list_by_share.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -298,21 +311,21 @@ def prepare_request(next_link=None):
'shareName': self._serialize.url("share_name", share_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
@@ -331,11 +344,11 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations'}
+ list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations'} # type: ignore
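
In the renamed invitations operations above, create no longer accepts the flattened target_* keyword arguments and instead takes an Invitation model as the request body. A hedged sketch of the new calling convention follows (not part of the patch); the models import path and the client.invitations attribute name are assumptions, and `client` is an open aio client like the one in the earlier sketch.

# Illustrative sketch only -- not part of the patch. `client` is assumed to be
# an open aio DataShareManagementClient; the models import path and the
# `invitations` attribute name are assumptions.
from azext_datashare.vendored_sdks.datashare import models


async def invite_recipient(client):
    # The flattened target_* keyword arguments were replaced by a single
    # Invitation body, so the model is constructed explicitly by the caller.
    invitation = models.Invitation(target_email="recipient@contoso.com")
    return await client.invitations.create(
        resource_group_name="my-rg",
        account_name="my-account",
        share_name="my-share",
        invitation_name="my-invitation",
        invitation=invitation,
    )
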
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_operation_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_operations.py
similarity index 66%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_operation_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_operations.py
index 397c94d67b8..d440c51fe31 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_operation_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_operations.py
@@ -5,21 +5,22 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class OperationOperations:
- """OperationOperations async operations.
+class Operations:
+ """Operations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -43,37 +44,41 @@ def __init__(self, client, config, serializer, deserializer) -> None:
def list(
self,
**kwargs
- ) -> "models.OperationList":
- """List of available operations.
+ ) -> AsyncIterable["models.OperationList"]:
+ """Lists the available operations.
- Lists the available operations.
+ List of available operations.
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: OperationList or the result of cls(response)
- :rtype: ~data_share_management_client.models.OperationList
+ :return: An iterator like instance of either OperationList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.OperationList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.OperationList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list.metadata['url']
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
@@ -92,11 +97,11 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list.metadata = {'url': '/providers/Microsoft.DataShare/operations'}
+ list.metadata = {'url': '/providers/Microsoft.DataShare/operations'} # type: ignore
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_provider_share_subscription_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_provider_share_subscriptions_operations.py
similarity index 56%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_provider_share_subscription_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_provider_share_subscriptions_operations.py
index e516dc2824a..701f841bd5d 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_provider_share_subscription_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_provider_share_subscriptions_operations.py
@@ -5,22 +5,24 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class ProviderShareSubscriptionOperations:
- """ProviderShareSubscriptionOperations async operations.
+class ProviderShareSubscriptionsOperations:
+ """ProviderShareSubscriptionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -41,17 +43,18 @@ def __init__(self, client, config, serializer, deserializer) -> None:
self._deserialize = deserializer
self._config = config
- async def get_by_share(
+ async def adjust(
self,
resource_group_name: str,
account_name: str,
share_name: str,
provider_share_subscription_id: str,
+ provider_share_subscription: "models.ProviderShareSubscription",
**kwargs
) -> "models.ProviderShareSubscription":
- """Get share subscription in a provider share.
+ """Adjust the expiration date of a share subscription in a provider share.
- Get share subscription in a provider share.
+ Adjust a share subscription's expiration date in a provider share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -61,17 +64,24 @@ async def get_by_share(
:type share_name: str
:param provider_share_subscription_id: To locate shareSubscription.
:type provider_share_subscription_id: str
+ :param provider_share_subscription: The provider share subscription.
+ :type provider_share_subscription: ~data_share_management_client.models.ProviderShareSubscription
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ProviderShareSubscription or the result of cls(response)
+ :return: ProviderShareSubscription, or the result of cls(response)
:rtype: ~data_share_management_client.models.ProviderShareSubscription
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscription"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.get_by_share.metadata['url']
+ url = self.adjust.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -87,37 +97,41 @@ async def get_by_share(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(provider_share_subscription, 'ProviderShareSubscription')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}'}
+ adjust.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/adjust'} # type: ignore
- def list_by_share(
+ async def reinstate(
self,
resource_group_name: str,
account_name: str,
share_name: str,
- skip_token: Optional[str] = None,
+ provider_share_subscription_id: str,
+ provider_share_subscription: "models.ProviderShareSubscription",
**kwargs
- ) -> "models.ProviderShareSubscriptionList":
- """List share subscriptions in a provider share.
+ ) -> "models.ProviderShareSubscription":
+ """Reinstate share subscription in a provider share.
- List of available share subscriptions to a provider share.
+ Reinstate share subscription in a provider share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -125,69 +139,63 @@ def list_by_share(
:type account_name: str
:param share_name: The name of the share.
:type share_name: str
- :param skip_token: Continuation Token.
- :type skip_token: str
+ :param provider_share_subscription_id: To locate shareSubscription.
+ :type provider_share_subscription_id: str
+ :param provider_share_subscription: The provider share subscription.
+ :type provider_share_subscription: ~data_share_management_client.models.ProviderShareSubscription
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ProviderShareSubscriptionList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ProviderShareSubscriptionList
+ :return: ProviderShareSubscription, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.ProviderShareSubscription
:raises: ~azure.core.exceptions.HttpResponseError
"""
- cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscriptionList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_by_share.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- 'shareName': self._serialize.url("share_name", share_name, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscription"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ # Construct URL
+ url = self.reinstate.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ 'providerShareSubscriptionId': self._serialize.url("provider_share_subscription_id", provider_share_subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- return request
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('ProviderShareSubscriptionList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- async def get_next(next_link=None):
- request = prepare_request(next_link)
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(provider_share_subscription, 'ProviderShareSubscription')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.DataShareError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
- return pipeline_response
+ if cls:
+ return cls(pipeline_response, deserialized, {})
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions'}
+ return deserialized
+ reinstate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/reinstate'} # type: ignore
async def _revoke_initial(
self,
@@ -198,11 +206,15 @@ async def _revoke_initial(
**kwargs
) -> "models.ProviderShareSubscription":
cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscription"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._revoke_initial.metadata['url']
+ url = self._revoke_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -218,9 +230,8 @@ async def _revoke_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -228,9 +239,8 @@ async def _revoke_initial(
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
@@ -238,19 +248,19 @@ async def _revoke_initial(
deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _revoke_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/revoke'}
+ _revoke_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/revoke'} # type: ignore
- async def revoke(
+ async def begin_revoke(
self,
resource_group_name: str,
account_name: str,
share_name: str,
provider_share_subscription_id: str,
**kwargs
- ) -> "models.ProviderShareSubscription":
+ ) -> AsyncLROPoller["models.ProviderShareSubscription"]:
"""Revoke share subscription in a provider share.
Revoke share subscription in a provider share.
@@ -264,24 +274,34 @@ async def revoke(
:param provider_share_subscription_id: To locate shareSubscription.
:type provider_share_subscription_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :return: An instance of LROPoller that returns ProviderShareSubscription
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.ProviderShareSubscription]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ProviderShareSubscription or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~data_share_management_client.models.ProviderShareSubscription]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscription"]
- raw_result = await self._revoke_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_name=share_name,
- provider_share_subscription_id=provider_share_subscription_id,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._revoke_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ provider_share_subscription_id=provider_share_subscription_id,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
@@ -290,17 +310,29 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ 'providerShareSubscriptionId': self._serialize.url("provider_share_subscription_id", provider_share_subscription_id, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
- return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
- revoke.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/revoke'}
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_revoke.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/revoke'} # type: ignore
- async def reinstate(
+ async def get_by_share(
self,
resource_group_name: str,
account_name: str,
@@ -308,9 +340,9 @@ async def reinstate(
provider_share_subscription_id: str,
**kwargs
) -> "models.ProviderShareSubscription":
- """Reinstate share subscription in a provider share.
+ """Get share subscription in a provider share.
- Reinstate share subscription in a provider share.
+ Get share subscription in a provider share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -321,16 +353,20 @@ async def reinstate(
:param provider_share_subscription_id: To locate shareSubscription.
:type provider_share_subscription_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ProviderShareSubscription or the result of cls(response)
+ :return: ProviderShareSubscription, or the result of cls(response)
:rtype: ~data_share_management_client.models.ProviderShareSubscription
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscription"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.reinstate.metadata['url']
+ url = self.get_by_share.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -346,22 +382,107 @@ async def reinstate(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
- request = self._client.post(url, query_parameters, header_parameters)
+ request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- reinstate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/reinstate'}
+ get_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}'} # type: ignore
+
+ def list_by_share(
+ self,
+ resource_group_name: str,
+ account_name: str,
+ share_name: str,
+ skip_token: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.ProviderShareSubscriptionList"]:
+ """List of available share subscriptions to a provider share.
+
+ List share subscriptions in a provider share.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param share_name: The name of the share.
+ :type share_name: str
+ :param skip_token: Continuation Token.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ProviderShareSubscriptionList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.ProviderShareSubscriptionList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscriptionList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_share.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ProviderShareSubscriptionList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions'} # type: ignore
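
The provider share subscription diff above renames revoke to begin_revoke, which now defaults to ARM polling and returns an AsyncLROPoller rather than the final model, so callers await poller.result() to get the revoked subscription. A hedged sketch (not part of the patch) is below; the client.provider_share_subscriptions attribute name and the placeholder id are assumptions, and `client` is an open aio client as in the first sketch.

# Illustrative sketch only -- not part of the patch. `client` is assumed to be
# an open aio DataShareManagementClient; the `provider_share_subscriptions`
# attribute name and the id below are assumptions/placeholders.
async def revoke_consumer_subscription(client):
    poller = await client.provider_share_subscriptions.begin_revoke(
        resource_group_name="my-rg",
        account_name="my-account",
        share_name="my-share",
        provider_share_subscription_id="<provider-share-subscription-guid>",
    )
    # begin_revoke now returns an AsyncLROPoller (ARM polling by default);
    # result() waits for the long-running revoke operation to complete.
    return await poller.result()
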
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_share_subscription_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_share_subscriptions_operations.py
similarity index 57%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_share_subscription_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_share_subscriptions_operations.py
index 4416c305e0e..96a638c85dd 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_share_subscription_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_share_subscriptions_operations.py
@@ -5,22 +5,24 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class ShareSubscriptionOperations:
- """ShareSubscriptionOperations async operations.
+class ShareSubscriptionsOperations:
+ """ShareSubscriptionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -41,105 +43,25 @@ def __init__(self, client, config, serializer, deserializer) -> None:
self._deserialize = deserializer
self._config = config
- async def get(
+ async def _cancel_synchronization_initial(
self,
resource_group_name: str,
account_name: str,
share_subscription_name: str,
+ share_subscription_synchronization: "models.ShareSubscriptionSynchronization",
**kwargs
- ) -> "models.ShareSubscription":
- """Get a shareSubscription in an account.
-
- Get shareSubscription in an account.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param account_name: The name of the share account.
- :type account_name: str
- :param share_subscription_name: The name of the shareSubscription.
- :type share_subscription_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareSubscription or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareSubscription
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscription"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- # Construct URL
- url = self.get.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ ) -> "models.ShareSubscriptionSynchronization":
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
-
- deserialized = self._deserialize('ShareSubscription', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'}
-
- async def create(
- self,
- resource_group_name: str,
- account_name: str,
- share_subscription_name: str,
- invitation_id: str,
- source_share_location: str,
- **kwargs
- ) -> "models.ShareSubscription":
- """Create a shareSubscription in an account.
-
- Create shareSubscription in an account.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param account_name: The name of the share account.
- :type account_name: str
- :param share_subscription_name: The name of the shareSubscription.
- :type share_subscription_name: str
- :param invitation_id: The invitation id.
- :type invitation_id: str
- :param source_share_location: Source share location.
- :type source_share_location: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareSubscription or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareSubscription or ~data_share_management_client.models.ShareSubscription
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscription"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _share_subscription = models.ShareSubscription(invitation_id=invitation_id, source_share_location=source_share_location)
- api_version = "2019-11-01"
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self._cancel_synchronization_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -155,94 +77,43 @@ async def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_share_subscription, 'ShareSubscription')
+ body_content = self._serialize.body(share_subscription_synchronization, 'ShareSubscriptionSynchronization')
body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
- if response.status_code not in [200, 201]:
+ if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
- deserialized = self._deserialize('ShareSubscription', pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize('ShareSubscription', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'}
-
- async def _delete_initial(
- self,
- resource_group_name: str,
- account_name: str,
- share_subscription_name: str,
- **kwargs
- ) -> "models.OperationResponse":
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- # Construct URL
- url = self._delete_initial.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('OperationResponse', pipeline_response)
+ if response.status_code == 202:
+ deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'}
+ _cancel_synchronization_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/cancelSynchronization'} # type: ignore
- async def delete(
+ async def begin_cancel_synchronization(
self,
resource_group_name: str,
account_name: str,
share_subscription_name: str,
+ share_subscription_synchronization: "models.ShareSubscriptionSynchronization",
**kwargs
- ) -> "models.OperationResponse":
- """Delete a shareSubscription in an account.
+ ) -> AsyncLROPoller["models.ShareSubscriptionSynchronization"]:
+ """Request cancellation of a data share snapshot.
- Delete shareSubscription in an account.
+ Request to cancel a synchronization.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -250,131 +121,77 @@ async def delete(
:type account_name: str
:param share_subscription_name: The name of the shareSubscription.
:type share_subscription_name: str
+ :param share_subscription_synchronization: Share Subscription Synchronization payload.
+ :type share_subscription_synchronization: ~data_share_management_client.models.ShareSubscriptionSynchronization
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :return: An instance of LROPoller that returns OperationResponse
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.OperationResponse]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ShareSubscriptionSynchronization or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~data_share_management_client.models.ShareSubscriptionSynchronization]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- raw_result = await self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_subscription_name=share_subscription_name,
- cls=lambda x,y,z: x,
- **kwargs
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._cancel_synchronization_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ share_subscription_synchronization=share_subscription_synchronization,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('OperationResponse', pipeline_response)
+ deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
- return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'}
-
- def list_by_account(
- self,
- resource_group_name: str,
- account_name: str,
- skip_token: Optional[str] = None,
- **kwargs
- ) -> "models.ShareSubscriptionList":
- """List share subscriptions in an account.
-
- List of available share subscriptions under an account.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param account_name: The name of the share account.
- :type account_name: str
- :param skip_token: Continuation Token.
- :type skip_token: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareSubscriptionList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareSubscriptionList
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_by_account.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('ShareSubscriptionList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions'}
-
- def list_source_share_synchronization_setting(
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_cancel_synchronization.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/cancelSynchronization'} # type: ignore
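For reference, a minimal usage sketch of the long-running cancel operation introduced above. The import paths, the client constructor, and the `share_subscriptions` attribute name are assumptions for illustration; the method signature and the `synchronization_id` field are taken from this diff.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_datashare.vendored_sdks.datashare.aio import DataShareManagementClient  # assumed import path
from azext_datashare.vendored_sdks.datashare import models  # assumed import path


async def cancel_snapshot(subscription_id: str) -> None:
    credential = DefaultAzureCredential()
    client = DataShareManagementClient(credential, subscription_id)  # assumed constructor
    payload = models.ShareSubscriptionSynchronization(synchronization_id="<synchronization-guid>")
    poller = await client.share_subscriptions.begin_cancel_synchronization(
        resource_group_name="my-rg",
        account_name="my-account",
        share_subscription_name="my-share-subscription",
        share_subscription_synchronization=payload,
    )
    result = await poller.result()  # resolves to ShareSubscriptionSynchronization
    print(result.synchronization_id)
    await client.close()
    await credential.close()


asyncio.run(cancel_snapshot("<subscription-id>"))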
+
+ def list_source_share_synchronization_settings(
self,
resource_group_name: str,
account_name: str,
share_subscription_name: str,
skip_token: Optional[str] = None,
**kwargs
- ) -> "models.SourceShareSynchronizationSettingList":
- """Get synchronization settings set on a share.
+ ) -> AsyncIterable["models.SourceShareSynchronizationSettingList"]:
+ """Get source share synchronization settings for a shareSubscription.
- Get source share synchronization settings for a shareSubscription.
+ Get synchronization settings set on a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -385,18 +202,26 @@ def list_source_share_synchronization_setting(
:param skip_token: Continuation token.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: SourceShareSynchronizationSettingList or the result of cls(response)
- :rtype: ~data_share_management_client.models.SourceShareSynchronizationSettingList
+ :return: An iterator like instance of either SourceShareSynchronizationSettingList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.SourceShareSynchronizationSettingList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SourceShareSynchronizationSettingList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_source_share_synchronization_setting.metadata['url']
+ url = self.list_source_share_synchronization_settings.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -404,21 +229,17 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.post(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
@@ -437,26 +258,29 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list_source_share_synchronization_setting.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSourceShareSynchronizationSettings'}
+ list_source_share_synchronization_settings.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSourceShareSynchronizationSettings'} # type: ignore
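A hypothetical sketch of consuming the pluralised settings pager above; the AsyncItemPaged return value handles $skipToken continuation automatically, so callers only iterate. Client construction and resource names are placeholders.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_datashare.vendored_sdks.datashare.aio import DataShareManagementClient  # assumed import path


async def show_source_settings(subscription_id: str) -> None:
    credential = DefaultAzureCredential()
    async with DataShareManagementClient(credential, subscription_id) as client:  # assumed constructor
        pager = client.share_subscriptions.list_source_share_synchronization_settings(
            resource_group_name="my-rg",
            account_name="my-account",
            share_subscription_name="my-share-subscription",
        )
        async for setting in pager:  # yields individual synchronization settings
            print(setting)
    await credential.close()


asyncio.run(show_source_settings("<subscription-id>"))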
- def list_synchronization(
+ def list_synchronization_details(
self,
resource_group_name: str,
account_name: str,
share_subscription_name: str,
+ share_subscription_synchronization: "models.ShareSubscriptionSynchronization",
skip_token: Optional[str] = None,
+ filter: Optional[str] = None,
+ orderby: Optional[str] = None,
**kwargs
- ) -> "models.ShareSubscriptionSynchronizationList":
- """List synchronizations of a share subscription.
+ ) -> AsyncIterable["models.SynchronizationDetailsList"]:
+ """List data set level details for a share subscription synchronization.
- List Synchronizations in a share subscription.
+ List synchronization details.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -464,21 +288,37 @@ def list_synchronization(
:type account_name: str
:param share_subscription_name: The name of the share subscription.
:type share_subscription_name: str
+ :param share_subscription_synchronization: Share Subscription Synchronization payload.
+ :type share_subscription_synchronization: ~data_share_management_client.models.ShareSubscriptionSynchronization
:param skip_token: Continuation token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareSubscriptionSynchronizationList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareSubscriptionSynchronizationList
+ :return: An iterator like instance of either SynchronizationDetailsList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.SynchronizationDetailsList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronizationList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationDetailsList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ content_type = "application/json"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_synchronization.metadata['url']
+ url = self.list_synchronization_details.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -486,25 +326,31 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(share_subscription_synchronization, 'ShareSubscriptionSynchronization')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.post(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(share_subscription_synchronization, 'ShareSubscriptionSynchronization')
+ body_content_kwargs['content'] = body_content
+ request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
return request
async def extract_data(pipeline_response):
- deserialized = self._deserialize('ShareSubscriptionSynchronizationList', pipeline_response)
+ deserialized = self._deserialize('SynchronizationDetailsList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
@@ -519,27 +365,28 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list_synchronization.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSynchronizations'}
+ list_synchronization_details.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSynchronizationDetails'} # type: ignore
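A sketch of the reworked details listing, which now takes a ShareSubscriptionSynchronization payload plus the new $filter/$orderby parameters. The OData property path below is an assumption; everything else mirrors the signature above.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_datashare.vendored_sdks.datashare.aio import DataShareManagementClient  # assumed import path
from azext_datashare.vendored_sdks.datashare import models  # assumed import path


async def show_synchronization_details(subscription_id: str) -> None:
    credential = DefaultAzureCredential()
    async with DataShareManagementClient(credential, subscription_id) as client:
        payload = models.ShareSubscriptionSynchronization(synchronization_id="<synchronization-guid>")
        pager = client.share_subscriptions.list_synchronization_details(
            resource_group_name="my-rg",
            account_name="my-account",
            share_subscription_name="my-share-subscription",
            share_subscription_synchronization=payload,
            orderby="properties/startTime desc",  # assumed OData property path
        )
        async for detail in pager:  # yields per-data-set synchronization details
            print(detail)
    await credential.close()


asyncio.run(show_synchronization_details("<subscription-id>"))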
- def list_synchronization_detail(
+ def list_synchronizations(
self,
resource_group_name: str,
account_name: str,
share_subscription_name: str,
- synchronization_id: str,
skip_token: Optional[str] = None,
+ filter: Optional[str] = None,
+ orderby: Optional[str] = None,
**kwargs
- ) -> "models.SynchronizationDetailsList":
- """List synchronization details.
+ ) -> AsyncIterable["models.ShareSubscriptionSynchronizationList"]:
+ """List Synchronizations in a share subscription.
- List data set level details for a share subscription synchronization.
+ List synchronizations of a share subscription.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -547,25 +394,33 @@ def list_synchronization_detail(
:type account_name: str
:param share_subscription_name: The name of the share subscription.
:type share_subscription_name: str
- :param synchronization_id: Synchronization id.
- :type synchronization_id: str
:param skip_token: Continuation token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: SynchronizationDetailsList or the result of cls(response)
- :rtype: ~data_share_management_client.models.SynchronizationDetailsList
+ :return: An iterator like instance of either ShareSubscriptionSynchronizationList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.ShareSubscriptionSynchronizationList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
- cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationDetailsList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- _share_subscription_synchronization = models.ShareSubscriptionSynchronization(synchronization_id=synchronization_id)
- api_version = "2019-11-01"
- content_type = "application/json"
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronizationList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_synchronization_detail.metadata['url']
+ url = self.list_synchronizations.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -573,30 +428,25 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_share_subscription_synchronization, 'ShareSubscriptionSynchronization')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
- deserialized = self._deserialize('SynchronizationDetailsList', pipeline_response)
+ deserialized = self._deserialize('ShareSubscriptionSynchronizationList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
@@ -611,32 +461,34 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list_synchronization_detail.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSynchronizationDetails'}
+ list_synchronizations.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSynchronizations'} # type: ignore
async def _synchronize_initial(
self,
resource_group_name: str,
account_name: str,
share_subscription_name: str,
- synchronization_mode: Optional[Union[str, "models.SynchronizationMode"]] = None,
+ synchronize: "models.Synchronize",
**kwargs
) -> "models.ShareSubscriptionSynchronization":
cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _synchronize = models.Synchronize(synchronization_mode=synchronization_mode)
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self._synchronize_initial.metadata['url']
+ url = self._synchronize_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -652,23 +504,20 @@ async def _synchronize_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_synchronize, 'Synchronize')
+ body_content = self._serialize.body(synchronize, 'Synchronize')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
@@ -676,22 +525,22 @@ async def _synchronize_initial(
deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _synchronize_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/Synchronize'}
+ _synchronize_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/synchronize'} # type: ignore
- async def synchronize(
+ async def begin_synchronize(
self,
resource_group_name: str,
account_name: str,
share_subscription_name: str,
- synchronization_mode: Optional[Union[str, "models.SynchronizationMode"]] = None,
+ synchronize: "models.Synchronize",
**kwargs
- ) -> "models.ShareSubscriptionSynchronization":
- """Initiate a copy.
+ ) -> AsyncLROPoller["models.ShareSubscriptionSynchronization"]:
+ """Initiate an asynchronous data share job.
- Initiate an asynchronous data share job.
+ Initiate a copy.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -699,28 +548,37 @@ async def synchronize(
:type account_name: str
:param share_subscription_name: The name of share subscription.
:type share_subscription_name: str
- :param synchronization_mode: Mode of synchronization used in triggers and snapshot sync.
- Incremental by default.
- :type synchronization_mode: str or ~data_share_management_client.models.SynchronizationMode
+ :param synchronize: Synchronize payload.
+ :type synchronize: ~data_share_management_client.models.Synchronize
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :return: An instance of LROPoller that returns ShareSubscriptionSynchronization
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.ShareSubscriptionSynchronization]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ShareSubscriptionSynchronization or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~data_share_management_client.models.ShareSubscriptionSynchronization]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
- raw_result = await self._synchronize_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_subscription_name=share_subscription_name,
- synchronization_mode=synchronization_mode,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._synchronize_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ synchronize=synchronize,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
@@ -729,33 +587,128 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
- return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
- synchronize.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/Synchronize'}
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_synchronize.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/synchronize'} # type: ignore
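A sketch of triggering a snapshot with the new begin_synchronize poller. The Synchronize(synchronization_mode=...) constructor and the "Incremental" mode come from the code and docstring removed above; client construction details are assumptions.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_datashare.vendored_sdks.datashare.aio import DataShareManagementClient  # assumed import path
from azext_datashare.vendored_sdks.datashare import models  # assumed import path


async def run_snapshot(subscription_id: str) -> None:
    credential = DefaultAzureCredential()
    async with DataShareManagementClient(credential, subscription_id) as client:
        poller = await client.share_subscriptions.begin_synchronize(
            resource_group_name="my-rg",
            account_name="my-account",
            share_subscription_name="my-share-subscription",
            synchronize=models.Synchronize(synchronization_mode="Incremental"),
        )
        synchronization = await poller.result()
        print(synchronization.synchronization_id)
    await credential.close()


asyncio.run(run_snapshot("<subscription-id>"))

A saved poller.continuation_token() can later be passed back through the continuation_token keyword documented above to resume polling from another process.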
- async def _cancel_synchronization_initial(
+ async def get(
self,
resource_group_name: str,
account_name: str,
share_subscription_name: str,
- synchronization_id: str,
**kwargs
- ) -> "models.ShareSubscriptionSynchronization":
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
+ ) -> "models.ShareSubscription":
+ """Get shareSubscription in an account.
+
+ Get a shareSubscription in an account.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param share_subscription_name: The name of the shareSubscription.
+ :type share_subscription_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ShareSubscription, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.ShareSubscription
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscription"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- _share_subscription_synchronization = models.ShareSubscriptionSynchronization(synchronization_id=synchronization_id)
- api_version = "2019-11-01"
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.DataShareError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ShareSubscription', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'} # type: ignore
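The relocated get operation is a plain awaitable with no poller or pager involved; a minimal sketch, assuming `client` is an already-constructed async DataShareManagementClient from this vendored SDK:

async def get_share_subscription(client, resource_group_name: str, account_name: str, name: str):
    # Returns a ShareSubscription model instance or raises HttpResponseError.
    return await client.share_subscriptions.get(
        resource_group_name=resource_group_name,
        account_name=account_name,
        share_subscription_name=name,
    )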
+
+ async def create(
+ self,
+ resource_group_name: str,
+ account_name: str,
+ share_subscription_name: str,
+ share_subscription: "models.ShareSubscription",
+ **kwargs
+ ) -> "models.ShareSubscription":
+ """Create shareSubscription in an account.
+
+ Create a shareSubscription in an account.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param share_subscription_name: The name of the shareSubscription.
+ :type share_subscription_name: str
+ :param share_subscription: create parameters for shareSubscription.
+ :type share_subscription: ~data_share_management_client.models.ShareSubscription
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ShareSubscription, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.ShareSubscription
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscription"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self._cancel_synchronization_initial.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -771,46 +724,94 @@ async def _cancel_synchronization_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_share_subscription_synchronization, 'ShareSubscriptionSynchronization')
+ body_content = self._serialize.body(share_subscription, 'ShareSubscription')
body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.DataShareError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('ShareSubscription', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('ShareSubscription', pipeline_response)
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'} # type: ignore
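A sketch of the create call, which now takes the full ShareSubscription model as its body. The invitation_id and source_share_location fields are assumptions about that model; the PUT-based call itself matches the method above, and `client` is assumed to be an async DataShareManagementClient instance.

from azext_datashare.vendored_sdks.datashare import models  # assumed import path


async def create_share_subscription(client, resource_group_name: str, account_name: str, name: str):
    payload = models.ShareSubscription(
        invitation_id="<invitation-guid>",     # assumed required field
        source_share_location="eastus",        # assumed required field
    )
    return await client.share_subscriptions.create(
        resource_group_name=resource_group_name,
        account_name=account_name,
        share_subscription_name=name,
        share_subscription=payload,
    )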
+
+ async def _delete_initial(
+ self,
+ resource_group_name: str,
+ account_name: str,
+ share_subscription_name: str,
+ **kwargs
+ ) -> Optional["models.OperationResponse"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OperationResponse"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
- if response.status_code not in [200, 202]:
+ if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
- deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
-
- if response.status_code == 202:
- deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
+ deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _cancel_synchronization_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/cancelSynchronization'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'} # type: ignore
- async def cancel_synchronization(
+ async def begin_delete(
self,
resource_group_name: str,
account_name: str,
share_subscription_name: str,
- synchronization_id: str,
**kwargs
- ) -> "models.ShareSubscriptionSynchronization":
- """Request to cancel a synchronization.
+ ) -> AsyncLROPoller["models.OperationResponse"]:
+ """Delete shareSubscription in an account.
- Request cancellation of a data share snapshot.
+ Delete a shareSubscription in an account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -818,41 +819,151 @@ async def cancel_synchronization(
:type account_name: str
:param share_subscription_name: The name of the shareSubscription.
:type share_subscription_name: str
- :param synchronization_id: Synchronization id.
- :type synchronization_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :return: An instance of LROPoller that returns ShareSubscriptionSynchronization
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.ShareSubscriptionSynchronization]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either OperationResponse or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~data_share_management_client.models.OperationResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
- raw_result = await self._cancel_synchronization_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_subscription_name=share_subscription_name,
- synchronization_id=synchronization_id,
- cls=lambda x,y,z: x,
- **kwargs
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
+ deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
- return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
- cancel_synchronization.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/cancelSynchronization'}
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'} # type: ignore
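A sketch of the delete poller, assuming `client` is an already-constructed async DataShareManagementClient:

async def delete_share_subscription(client, resource_group_name: str, account_name: str, name: str):
    poller = await client.share_subscriptions.begin_delete(
        resource_group_name=resource_group_name,
        account_name=account_name,
        share_subscription_name=name,
    )
    # poller.continuation_token() could be persisted here and fed back via the
    # continuation_token keyword to resume polling elsewhere.
    return await poller.result()  # resolves to OperationResponse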
+
+ def list_by_account(
+ self,
+ resource_group_name: str,
+ account_name: str,
+ skip_token: Optional[str] = None,
+ filter: Optional[str] = None,
+ orderby: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.ShareSubscriptionList"]:
+ """List of available share subscriptions under an account.
+
+ List share subscriptions in an account.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param skip_token: Continuation Token.
+ :type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ShareSubscriptionList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.ShareSubscriptionList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_account.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ShareSubscriptionList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions'} # type: ignore
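A sketch of paging through an account's share subscriptions with the new server-side $filter/$orderby options; `client` is assumed to be an async DataShareManagementClient, and the OData expression in the filter is an assumption.

async def list_share_subscriptions(client, resource_group_name: str, account_name: str):
    pager = client.share_subscriptions.list_by_account(
        resource_group_name=resource_group_name,
        account_name=account_name,
        filter="name eq 'my-share-subscription'",  # assumed OData expression
        orderby="name",
    )
    return [item async for item in pager]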
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_share_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_shares_operations.py
similarity index 58%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_share_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_shares_operations.py
index 0c7c5ef51f1..53bb64e9d36 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_share_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_shares_operations.py
@@ -5,23 +5,24 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-import datetime
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class ShareOperations:
- """ShareOperations async operations.
+class SharesOperations:
+ """SharesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -42,6 +43,209 @@ def __init__(self, client, config, serializer, deserializer) -> None:
self._deserialize = deserializer
self._config = config
+ def list_synchronization_details(
+ self,
+ resource_group_name: str,
+ account_name: str,
+ share_name: str,
+ share_synchronization: "models.ShareSynchronization",
+ skip_token: Optional[str] = None,
+ filter: Optional[str] = None,
+ orderby: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.SynchronizationDetailsList"]:
+ """List data set level details for a share synchronization.
+
+ List synchronization details.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param share_name: The name of the share.
+ :type share_name: str
+ :param share_synchronization: Share Synchronization payload.
+ :type share_synchronization: ~data_share_management_client.models.ShareSynchronization
+ :param skip_token: Continuation token.
+ :type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either SynchronizationDetailsList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.SynchronizationDetailsList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationDetailsList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ content_type = "application/json"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_synchronization_details.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(share_synchronization, 'ShareSynchronization')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(share_synchronization, 'ShareSynchronization')
+ body_content_kwargs['content'] = body_content
+ request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('SynchronizationDetailsList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_synchronization_details.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/listSynchronizationDetails'} # type: ignore
+
+ def list_synchronizations(
+ self,
+ resource_group_name: str,
+ account_name: str,
+ share_name: str,
+ skip_token: Optional[str] = None,
+ filter: Optional[str] = None,
+ orderby: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.ShareSynchronizationList"]:
+ """List Synchronizations in a share.
+
+ List synchronizations of a share.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param share_name: The name of the share.
+ :type share_name: str
+ :param skip_token: Continuation token.
+ :type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ShareSynchronizationList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.ShareSynchronizationList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSynchronizationList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_synchronizations.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ShareSynchronizationList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_synchronizations.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/listSynchronizations'} # type: ignore
+
async def get(
self,
resource_group_name: str,
@@ -49,9 +253,9 @@ async def get(
share_name: str,
**kwargs
) -> "models.Share":
- """Get a share.
+ """Get a specified share.
- Get a specified share.
+ Get a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -60,16 +264,20 @@ async def get(
:param share_name: The name of the share to retrieve.
:type share_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Share or the result of cls(response)
+ :return: Share, or the result of cls(response)
:rtype: ~data_share_management_client.models.Share
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Share"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -84,9 +292,8 @@ async def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -94,29 +301,27 @@ async def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Share', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'} # type: ignore
async def create(
self,
resource_group_name: str,
account_name: str,
share_name: str,
- description: Optional[str] = None,
- share_kind: Optional[Union[str, "models.ShareKind"]] = None,
- terms: Optional[str] = None,
+ share: "models.Share",
**kwargs
) -> "models.Share":
- """Create a share.
+ """Create a share in the given account.
- Create a share in the given account.
+ Create a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -124,26 +329,24 @@ async def create(
:type account_name: str
:param share_name: The name of the share.
:type share_name: str
- :param description: Share description.
- :type description: str
- :param share_kind: Share kind.
- :type share_kind: str or ~data_share_management_client.models.ShareKind
- :param terms: Share terms.
- :type terms: str
+ :param share: The share payload.
+ :type share: ~data_share_management_client.models.Share
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Share or the result of cls(response)
- :rtype: ~data_share_management_client.models.Share or ~data_share_management_client.models.Share
+ :return: Share, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.Share
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Share"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _share = models.Share(description=description, share_kind=share_kind, terms=terms)
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -159,23 +362,20 @@ async def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_share, 'Share')
+ body_content = self._serialize.body(share, 'Share')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Share', pipeline_response)
@@ -183,10 +383,10 @@ async def create(
deserialized = self._deserialize('Share', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'}
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'} # type: ignore
async def _delete_initial(
self,
@@ -194,13 +394,17 @@ async def _delete_initial(
account_name: str,
share_name: str,
**kwargs
- ) -> "models.OperationResponse":
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ ) -> Optional["models.OperationResponse"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OperationResponse"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._delete_initial.metadata['url']
+ url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -215,9 +419,8 @@ async def _delete_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -225,28 +428,28 @@ async def _delete_initial(
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'} # type: ignore
- async def delete(
+ async def begin_delete(
self,
resource_group_name: str,
account_name: str,
share_name: str,
**kwargs
- ) -> "models.OperationResponse":
- """Delete a share.
+ ) -> AsyncLROPoller["models.OperationResponse"]:
+ """Deletes a share.
- Deletes a share.
+ Delete a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -255,23 +458,33 @@ async def delete(
:param share_name: The name of the share.
:type share_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :return: An instance of LROPoller that returns OperationResponse
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.OperationResponse]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either OperationResponse or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~data_share_management_client.models.OperationResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- raw_result = await self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_name=share_name,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('OperationResponse', pipeline_response)
@@ -280,26 +493,39 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
- return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'}
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'} # type: ignore
def list_by_account(
self,
resource_group_name: str,
account_name: str,
skip_token: Optional[str] = None,
+ filter: Optional[str] = None,
+ orderby: Optional[str] = None,
**kwargs
- ) -> "models.ShareList":
- """List shares in an account.
+ ) -> AsyncIterable["models.ShareList"]:
+ """List of available shares under an account.
- List of available shares under an account.
+ List shares in an account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -307,242 +533,56 @@ def list_by_account(
:type account_name: str
:param skip_token: Continuation Token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareList
+ :return: An iterator like instance of either ShareList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.ShareList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ShareList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_by_account.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('ShareList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares'}
-
- def list_synchronization(
- self,
- resource_group_name: str,
- account_name: str,
- share_name: str,
- skip_token: Optional[str] = None,
- **kwargs
- ) -> "models.ShareSynchronizationList":
- """List synchronizations of a share.
-
- List Synchronizations in a share.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param account_name: The name of the share account.
- :type account_name: str
- :param share_name: The name of the share.
- :type share_name: str
- :param skip_token: Continuation token.
- :type skip_token: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareSynchronizationList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareSynchronizationList
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSynchronizationList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_synchronization.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- 'shareName': self._serialize.url("share_name", share_name, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.post(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('ShareSynchronizationList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_synchronization.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/listSynchronizations'}
-
- def list_synchronization_detail(
- self,
- resource_group_name: str,
- account_name: str,
- share_name: str,
- skip_token: Optional[str] = None,
- consumer_email: Optional[str] = None,
- consumer_name: Optional[str] = None,
- consumer_tenant_name: Optional[str] = None,
- duration_ms: Optional[int] = None,
- end_time: Optional[datetime.datetime] = None,
- message: Optional[str] = None,
- start_time: Optional[datetime.datetime] = None,
- status: Optional[str] = None,
- synchronization_id: Optional[str] = None,
- **kwargs
- ) -> "models.SynchronizationDetailsList":
- """List synchronization details.
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- List data set level details for a share synchronization.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param account_name: The name of the share account.
- :type account_name: str
- :param share_name: The name of the share.
- :type share_name: str
- :param skip_token: Continuation token.
- :type skip_token: str
- :param consumer_email: Email of the user who created the synchronization.
- :type consumer_email: str
- :param consumer_name: Name of the user who created the synchronization.
- :type consumer_name: str
- :param consumer_tenant_name: Tenant name of the consumer who created the synchronization.
- :type consumer_tenant_name: str
- :param duration_ms: synchronization duration.
- :type duration_ms: int
- :param end_time: End time of synchronization.
- :type end_time: ~datetime.datetime
- :param message: message of synchronization.
- :type message: str
- :param start_time: start time of synchronization.
- :type start_time: ~datetime.datetime
- :param status: Raw Status.
- :type status: str
- :param synchronization_id: Synchronization id.
- :type synchronization_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SynchronizationDetailsList or the result of cls(response)
- :rtype: ~data_share_management_client.models.SynchronizationDetailsList
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationDetailsList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- _share_synchronization = models.ShareSynchronization(consumer_email=consumer_email, consumer_name=consumer_name, consumer_tenant_name=consumer_tenant_name, duration_ms=duration_ms, end_time=end_time, message=message, start_time=start_time, status=status, synchronization_id=synchronization_id)
- api_version = "2019-11-01"
- content_type = "application/json"
-
- def prepare_request(next_link=None):
if not next_link:
# Construct URL
- url = self.list_synchronization_detail.metadata['url']
+ url = self.list_by_account.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'accountName': self._serialize.url("account_name", account_name, 'str'),
- 'shareName': self._serialize.url("share_name", share_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_share_synchronization, 'ShareSynchronization')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
- deserialized = self._deserialize('SynchronizationDetailsList', pipeline_response)
+ deserialized = self._deserialize('ShareList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
@@ -557,11 +597,11 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list_synchronization_detail.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/listSynchronizationDetails'}
+ list_by_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares'} # type: ignore
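For orientation, a minimal usage sketch of the renamed async SharesOperations surface described by the docstrings above. It is not part of this change: the client entry point (DataShareManagementClient in the package's aio namespace), the client.shares attribute, and the placeholder resource names are assumptions; only the method names, parameters, and return types follow the generated operations in this patch.

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azext_datashare.vendored_sdks.datashare.aio import DataShareManagementClient
    from azext_datashare.vendored_sdks.datashare import models


    async def main():
        # Assumed entry point and attribute names; the call patterns below
        # mirror the generated signatures shown in the diff.
        async with DefaultAzureCredential() as credential:
            async with DataShareManagementClient(credential, "<subscription-id>") as client:
                # list_by_account now returns an AsyncItemPaged rather than a single ShareList page.
                async for share in client.shares.list_by_account("my-rg", "my-account"):
                    print(share.name)

                # Body parameters are no longer flattened; pass the ShareSynchronization model directly.
                sync = models.ShareSynchronization(synchronization_id="<synchronization-guid>")
                async for detail in client.shares.list_synchronization_details(
                        "my-rg", "my-account", "my-share", share_synchronization=sync):
                    print(detail)

                # delete() is now begin_delete() and returns an AsyncLROPoller.
                poller = await client.shares.begin_delete("my-rg", "my-account", "my-share")
                await poller.result()


    asyncio.run(main())
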
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_synchronization_setting_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_synchronization_settings_operations.py
similarity index 68%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_synchronization_setting_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_synchronization_settings_operations.py
index 78a95bd6159..5aa3aad1e91 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_synchronization_setting_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_synchronization_settings_operations.py
@@ -5,22 +5,24 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class SynchronizationSettingOperations:
- """SynchronizationSettingOperations async operations.
+class SynchronizationSettingsOperations:
+ """SynchronizationSettingsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -49,9 +51,9 @@ async def get(
synchronization_setting_name: str,
**kwargs
) -> "models.SynchronizationSetting":
- """Get a synchronizationSetting in a share.
+ """Get synchronizationSetting in a share.
- Get synchronizationSetting in a share.
+ Get a synchronizationSetting in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -62,16 +64,20 @@ async def get(
:param synchronization_setting_name: The name of the synchronizationSetting.
:type synchronization_setting_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: SynchronizationSetting or the result of cls(response)
+ :return: SynchronizationSetting, or the result of cls(response)
:rtype: ~data_share_management_client.models.SynchronizationSetting
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationSetting"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -87,9 +93,8 @@ async def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -97,15 +102,15 @@ async def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('SynchronizationSetting', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'} # type: ignore
async def create(
self,
@@ -116,9 +121,9 @@ async def create(
synchronization_setting: "models.SynchronizationSetting",
**kwargs
) -> "models.SynchronizationSetting":
- """Create or update a synchronizationSetting.
+ """Adds a new synchronization setting to an existing share.
- Adds a new synchronization setting to an existing share.
+ Create a synchronizationSetting.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -131,17 +136,21 @@ async def create(
:param synchronization_setting: The new synchronization setting information.
:type synchronization_setting: ~data_share_management_client.models.SynchronizationSetting
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: SynchronizationSetting or the result of cls(response)
- :rtype: ~data_share_management_client.models.SynchronizationSetting or ~data_share_management_client.models.SynchronizationSetting
+ :return: SynchronizationSetting, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.SynchronizationSetting
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationSetting"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -158,23 +167,20 @@ async def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(synchronization_setting, 'SynchronizationSetting')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SynchronizationSetting', pipeline_response)
@@ -182,10 +188,10 @@ async def create(
deserialized = self._deserialize('SynchronizationSetting', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'}
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'} # type: ignore
async def _delete_initial(
self,
@@ -194,13 +200,17 @@ async def _delete_initial(
share_name: str,
synchronization_setting_name: str,
**kwargs
- ) -> "models.OperationResponse":
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ ) -> Optional["models.OperationResponse"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OperationResponse"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._delete_initial.metadata['url']
+ url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -216,9 +226,8 @@ async def _delete_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -226,29 +235,29 @@ async def _delete_initial(
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'} # type: ignore
- async def delete(
+ async def begin_delete(
self,
resource_group_name: str,
account_name: str,
share_name: str,
synchronization_setting_name: str,
**kwargs
- ) -> "models.OperationResponse":
- """Delete a synchronizationSetting in a share.
+ ) -> AsyncLROPoller["models.OperationResponse"]:
+ """Delete synchronizationSetting in a share.
- Delete synchronizationSetting in a share.
+ Delete a synchronizationSetting in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -259,24 +268,34 @@ async def delete(
:param synchronization_setting_name: The name of the synchronizationSetting .
:type synchronization_setting_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :return: An instance of LROPoller that returns OperationResponse
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.OperationResponse]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either OperationResponse or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~data_share_management_client.models.OperationResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- raw_result = await self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_name=share_name,
- synchronization_setting_name=synchronization_setting_name,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ synchronization_setting_name=synchronization_setting_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('OperationResponse', pipeline_response)
@@ -285,15 +304,27 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ 'synchronizationSettingName': self._serialize.url("synchronization_setting_name", synchronization_setting_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
- return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'}
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'} # type: ignore
def list_by_share(
self,
@@ -302,7 +333,7 @@ def list_by_share(
share_name: str,
skip_token: Optional[str] = None,
**kwargs
- ) -> "models.SynchronizationSettingList":
+ ) -> AsyncIterable["models.SynchronizationSettingList"]:
"""List synchronizationSettings in a share.
List synchronizationSettings in a share.
@@ -316,18 +347,26 @@ def list_by_share(
:param skip_token: continuation token.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: SynchronizationSettingList or the result of cls(response)
- :rtype: ~data_share_management_client.models.SynchronizationSettingList
+ :return: An iterator like instance of either SynchronizationSettingList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.SynchronizationSettingList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationSettingList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share.metadata['url']
+ url = self.list_by_share.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -335,21 +374,17 @@ def prepare_request(next_link=None):
'shareName': self._serialize.url("share_name", share_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
@@ -368,11 +403,11 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings'}
+ list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings'} # type: ignore
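Similarly, a minimal sketch (not part of this change) of the renamed SynchronizationSettingsOperations. The client.synchronization_settings attribute and the ScheduledSynchronizationSetting fields are assumptions; the method names, paging, and polling behaviour follow the generated code above.

    from azext_datashare.vendored_sdks.datashare import models


    async def manage_synchronization_setting(client):
        # create() is still a plain async call that returns the created resource.
        setting = await client.synchronization_settings.create(
            "my-rg", "my-account", "my-share", "daily-sync",
            synchronization_setting=models.ScheduledSynchronizationSetting(
                recurrence_interval="Day",
                synchronization_time="2021-01-01T00:00:00Z",
            ),
        )
        print(setting.name)

        # list_by_share now returns an AsyncItemPaged instead of a single page object.
        async for s in client.synchronization_settings.list_by_share("my-rg", "my-account", "my-share"):
            print(s.name)

        # delete() is now begin_delete(), defaulting to AsyncARMPolling and returning an AsyncLROPoller.
        poller = await client.synchronization_settings.begin_delete(
            "my-rg", "my-account", "my-share", "daily-sync")
        await poller.result()
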
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_trigger_operations_async.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_triggers_operations.py
similarity index 62%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_trigger_operations_async.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_triggers_operations.py
index 18598bf412e..1cd925ea702 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/_trigger_operations_async.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations/_triggers_operations.py
@@ -5,22 +5,24 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-class TriggerOperations:
- """TriggerOperations async operations.
+class TriggersOperations:
+ """TriggersOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -49,9 +51,9 @@ async def get(
trigger_name: str,
**kwargs
) -> "models.Trigger":
- """Get a Trigger in a shareSubscription.
+ """Get Trigger in a shareSubscription.
- Get Trigger in a shareSubscription.
+ Get a Trigger in a shareSubscription.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -62,16 +64,20 @@ async def get(
:param trigger_name: The name of the trigger.
:type trigger_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Trigger or the result of cls(response)
+ :return: Trigger, or the result of cls(response)
:rtype: ~data_share_management_client.models.Trigger
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Trigger"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -87,9 +93,8 @@ async def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -97,15 +102,15 @@ async def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Trigger', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'} # type: ignore
async def _create_initial(
self,
@@ -117,12 +122,16 @@ async def _create_initial(
**kwargs
) -> "models.Trigger":
cls = kwargs.pop('cls', None) # type: ClsType["models.Trigger"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self._create_initial.metadata['url']
+ url = self._create_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -139,23 +148,20 @@ async def _create_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(trigger, 'Trigger')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Trigger', pipeline_response)
@@ -163,12 +169,12 @@ async def _create_initial(
deserialized = self._deserialize('Trigger', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'}
+ _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'} # type: ignore
- async def create(
+ async def begin_create(
self,
resource_group_name: str,
account_name: str,
@@ -176,42 +182,52 @@ async def create(
trigger_name: str,
trigger: "models.Trigger",
**kwargs
- ) -> "models.Trigger":
- """Create a Trigger.
+ ) -> AsyncLROPoller["models.Trigger"]:
+ """This method creates a trigger for a share subscription.
- This method creates a trigger for a share subscription.
+ Create a Trigger.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
:type account_name: str
:param share_subscription_name: The name of the share subscription which will hold the data set
- sink.
+ sink.
:type share_subscription_name: str
:param trigger_name: The name of the trigger.
:type trigger_name: str
:param trigger: Trigger details.
:type trigger: ~data_share_management_client.models.Trigger
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :return: An instance of LROPoller that returns Trigger
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.Trigger]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either Trigger or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~data_share_management_client.models.Trigger]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.Trigger"]
- raw_result = await self._create_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_subscription_name=share_subscription_name,
- trigger_name=trigger_name,
- trigger=trigger,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ trigger_name=trigger_name,
+ trigger=trigger,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('Trigger', pipeline_response)
@@ -220,15 +236,27 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
- return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'}
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'} # type: ignore
async def _delete_initial(
self,
@@ -237,13 +265,17 @@ async def _delete_initial(
share_subscription_name: str,
trigger_name: str,
**kwargs
- ) -> "models.OperationResponse":
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ ) -> Optional["models.OperationResponse"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OperationResponse"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._delete_initial.metadata['url']
+ url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -259,9 +291,8 @@ async def _delete_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -269,29 +300,29 @@ async def _delete_initial(
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'} # type: ignore
- async def delete(
+ async def begin_delete(
self,
resource_group_name: str,
account_name: str,
share_subscription_name: str,
trigger_name: str,
**kwargs
- ) -> "models.OperationResponse":
- """Delete a Trigger in a shareSubscription.
+ ) -> AsyncLROPoller["models.OperationResponse"]:
+ """Delete Trigger in a shareSubscription.
- Delete Trigger in a shareSubscription.
+ Delete a Trigger in a shareSubscription.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -302,24 +333,34 @@ async def delete(
:param trigger_name: The name of the trigger.
:type trigger_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :return: An instance of LROPoller that returns OperationResponse
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.OperationResponse]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either OperationResponse or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~data_share_management_client.models.OperationResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- raw_result = await self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_subscription_name=share_subscription_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ trigger_name=trigger_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('OperationResponse', pipeline_response)
@@ -328,15 +369,27 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
- return await async_poller(self._client, raw_result, get_long_running_output, polling_method)
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'}
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'} # type: ignore
def list_by_share_subscription(
self,
@@ -345,7 +398,7 @@ def list_by_share_subscription(
share_subscription_name: str,
skip_token: Optional[str] = None,
**kwargs
- ) -> "models.TriggerList":
+ ) -> AsyncIterable["models.TriggerList"]:
"""List Triggers in a share subscription.
List Triggers in a share subscription.
@@ -359,18 +412,26 @@ def list_by_share_subscription(
:param skip_token: Continuation token.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerList or the result of cls(response)
- :rtype: ~data_share_management_client.models.TriggerList
+ :return: An iterator like instance of either TriggerList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_share_management_client.models.TriggerList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share_subscription.metadata['url']
+ url = self.list_by_share_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -378,21 +439,17 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
@@ -411,11 +468,11 @@ async def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
- list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers'}
+ list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers'} # type: ignore
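The rename from TriggerOperations.create/delete to TriggersOperations.begin_create/begin_delete also changes the calling pattern: both methods now return an AsyncLROPoller with ARM polling enabled by default (polling defaults to True) and accept a continuation_token keyword to resume a saved poller. A minimal usage sketch, assuming the aio DataShareManagementClient exposes the group as client.triggers and that trigger_payload is a valid Trigger model instance (both names are assumptions for illustration):

    # Illustrative sketch of the begin_* long-running-operation pattern introduced above.
    async def recreate_trigger(client, rg, account, share_sub, trigger_name, trigger_payload):
        poller = await client.triggers.begin_create(
            resource_group_name=rg,
            account_name=account,
            share_subscription_name=share_sub,
            trigger_name=trigger_name,
            trigger=trigger_payload,
        )
        created = await poller.result()  # wait for the LRO to finish and get the deserialized Trigger

        delete_poller = await client.triggers.begin_delete(
            resource_group_name=rg,
            account_name=account,
            share_subscription_name=share_sub,
            trigger_name=trigger_name,
        )
        await delete_poller.result()
        return created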
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/__init__.py b/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/__init__.py
deleted file mode 100644
index c50fe5e6a33..00000000000
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/aio/operations_async/__init__.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from ._account_operations_async import AccountOperations
-from ._consumer_invitation_operations_async import ConsumerInvitationOperations
-from ._data_set_operations_async import DataSetOperations
-from ._data_set_mapping_operations_async import DataSetMappingOperations
-from ._invitation_operations_async import InvitationOperations
-from ._operation_operations_async import OperationOperations
-from ._share_operations_async import ShareOperations
-from ._provider_share_subscription_operations_async import ProviderShareSubscriptionOperations
-from ._share_subscription_operations_async import ShareSubscriptionOperations
-from ._consumer_source_data_set_operations_async import ConsumerSourceDataSetOperations
-from ._synchronization_setting_operations_async import SynchronizationSettingOperations
-from ._trigger_operations_async import TriggerOperations
-
-__all__ = [
- 'AccountOperations',
- 'ConsumerInvitationOperations',
- 'DataSetOperations',
- 'DataSetMappingOperations',
- 'InvitationOperations',
- 'OperationOperations',
- 'ShareOperations',
- 'ProviderShareSubscriptionOperations',
- 'ShareSubscriptionOperations',
- 'ConsumerSourceDataSetOperations',
- 'SynchronizationSettingOperations',
- 'TriggerOperations',
-]
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/models/__init__.py b/src/datashare/azext_datashare/vendored_sdks/datashare/models/__init__.py
index 237bc58e2ae..34e8f47a76e 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/models/__init__.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/models/__init__.py
@@ -66,15 +66,18 @@
from ._models_py3 import ShareSynchronizationList
from ._models_py3 import SourceShareSynchronizationSetting
from ._models_py3 import SourceShareSynchronizationSettingList
- from ._models_py3 import SqlDBTableDataSet
- from ._models_py3 import SqlDBTableDataSetMapping
+ from ._models_py3 import SqlDbTableDataSet
+ from ._models_py3 import SqlDbTableDataSetMapping
from ._models_py3 import SqlDwTableDataSet
from ._models_py3 import SqlDwTableDataSetMapping
+ from ._models_py3 import SynapseWorkspaceSqlPoolTableDataSet
+ from ._models_py3 import SynapseWorkspaceSqlPoolTableDataSetMapping
from ._models_py3 import SynchronizationDetails
from ._models_py3 import SynchronizationDetailsList
from ._models_py3 import SynchronizationSetting
from ._models_py3 import SynchronizationSettingList
from ._models_py3 import Synchronize
+ from ._models_py3 import SystemData
from ._models_py3 import Trigger
from ._models_py3 import TriggerList
except (SyntaxError, ImportError):
@@ -137,31 +140,41 @@
from ._models import ShareSynchronizationList # type: ignore
from ._models import SourceShareSynchronizationSetting # type: ignore
from ._models import SourceShareSynchronizationSettingList # type: ignore
- from ._models import SqlDBTableDataSet # type: ignore
- from ._models import SqlDBTableDataSetMapping # type: ignore
+ from ._models import SqlDbTableDataSet # type: ignore
+ from ._models import SqlDbTableDataSetMapping # type: ignore
from ._models import SqlDwTableDataSet # type: ignore
from ._models import SqlDwTableDataSetMapping # type: ignore
+ from ._models import SynapseWorkspaceSqlPoolTableDataSet # type: ignore
+ from ._models import SynapseWorkspaceSqlPoolTableDataSetMapping # type: ignore
from ._models import SynchronizationDetails # type: ignore
from ._models import SynchronizationDetailsList # type: ignore
from ._models import SynchronizationSetting # type: ignore
from ._models import SynchronizationSettingList # type: ignore
from ._models import Synchronize # type: ignore
+ from ._models import SystemData # type: ignore
from ._models import Trigger # type: ignore
from ._models import TriggerList # type: ignore
from ._data_share_management_client_enums import (
+ CreatedByType,
+ DataSetKind,
+ DataSetMappingKind,
DataSetMappingStatus,
DataSetType,
InvitationStatus,
- Kind,
+ LastModifiedByType,
OutputType,
ProvisioningState,
RecurrenceInterval,
ShareKind,
ShareSubscriptionStatus,
+ SourceShareSynchronizationSettingKind,
Status,
SynchronizationMode,
+ SynchronizationSettingKind,
+ TriggerKind,
TriggerStatus,
+ Type,
)
__all__ = [
@@ -224,27 +237,37 @@
'ShareSynchronizationList',
'SourceShareSynchronizationSetting',
'SourceShareSynchronizationSettingList',
- 'SqlDBTableDataSet',
- 'SqlDBTableDataSetMapping',
+ 'SqlDbTableDataSet',
+ 'SqlDbTableDataSetMapping',
'SqlDwTableDataSet',
'SqlDwTableDataSetMapping',
+ 'SynapseWorkspaceSqlPoolTableDataSet',
+ 'SynapseWorkspaceSqlPoolTableDataSetMapping',
'SynchronizationDetails',
'SynchronizationDetailsList',
'SynchronizationSetting',
'SynchronizationSettingList',
'Synchronize',
+ 'SystemData',
'Trigger',
'TriggerList',
+ 'CreatedByType',
+ 'DataSetKind',
+ 'DataSetMappingKind',
'DataSetMappingStatus',
'DataSetType',
'InvitationStatus',
- 'Kind',
+ 'LastModifiedByType',
'OutputType',
'ProvisioningState',
'RecurrenceInterval',
'ShareKind',
'ShareSubscriptionStatus',
+ 'SourceShareSynchronizationSettingKind',
'Status',
'SynchronizationMode',
+ 'SynchronizationSettingKind',
+ 'TriggerKind',
'TriggerStatus',
+ 'Type',
]
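Because the exports above rename SqlDBTableDataSet and SqlDBTableDataSetMapping to SqlDbTableDataSet and SqlDbTableDataSetMapping, and add SystemData plus the Synapse workspace SQL pool models, any hand-written code importing the old names needs a matching update. A minimal sketch of the affected imports (illustrative only):

    # The previous generation exported SqlDBTableDataSet / SqlDBTableDataSetMapping;
    # after this regeneration the casing changes and new models become available.
    from azext_datashare.vendored_sdks.datashare.models import (
        SqlDbTableDataSet,
        SqlDbTableDataSetMapping,
        SynapseWorkspaceSqlPoolTableDataSet,
        SystemData,
    )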
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/models/_data_share_management_client_enums.py b/src/datashare/azext_datashare/vendored_sdks/datashare/models/_data_share_management_client_enums.py
index a2a515c4266..dc290d85bab 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/models/_data_share_management_client_enums.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/models/_data_share_management_client_enums.py
@@ -6,123 +6,198 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from enum import Enum
+from enum import Enum, EnumMeta
+from six import with_metaclass
+
+class _CaseInsensitiveEnumMeta(EnumMeta):
+ def __getitem__(self, name):
+ return super().__getitem__(name.upper())
+
+ def __getattr__(cls, name):
+ """Return the enum member matching `name`
+ We use __getattr__ instead of descriptors or inserting into the enum
+ class' __dict__ in order to support `name` and `value` being both
+ properties for enum members (which live in the class' __dict__) and
+ enum members themselves.
+ """
+ try:
+ return cls._member_map_[name.upper()]
+ except KeyError:
+ raise AttributeError(name)
+
+
+class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of identity that created the resource.
+ """
-class ProvisioningState(str, Enum):
- """Provisioning state of the Account
+ USER = "User"
+ APPLICATION = "Application"
+ MANAGED_IDENTITY = "ManagedIdentity"
+ KEY = "Key"
+
+class DataSetKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Kind of data set.
"""
- succeeded = "Succeeded"
- creating = "Creating"
- deleting = "Deleting"
- moving = "Moving"
- failed = "Failed"
+ BLOB = "Blob"
+ CONTAINER = "Container"
+ BLOB_FOLDER = "BlobFolder"
+ ADLS_GEN2_FILE_SYSTEM = "AdlsGen2FileSystem"
+ ADLS_GEN2_FOLDER = "AdlsGen2Folder"
+ ADLS_GEN2_FILE = "AdlsGen2File"
+ ADLS_GEN1_FOLDER = "AdlsGen1Folder"
+ ADLS_GEN1_FILE = "AdlsGen1File"
+ KUSTO_CLUSTER = "KustoCluster"
+ KUSTO_DATABASE = "KustoDatabase"
+ SQL_DB_TABLE = "SqlDBTable"
+ SQL_DW_TABLE = "SqlDWTable"
+ SYNAPSE_WORKSPACE_SQL_POOL_TABLE = "SynapseWorkspaceSqlPoolTable"
+
+class DataSetMappingKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Kind of data set mapping.
+ """
+
+ BLOB = "Blob"
+ CONTAINER = "Container"
+ BLOB_FOLDER = "BlobFolder"
+ ADLS_GEN2_FILE_SYSTEM = "AdlsGen2FileSystem"
+ ADLS_GEN2_FOLDER = "AdlsGen2Folder"
+ ADLS_GEN2_FILE = "AdlsGen2File"
+ KUSTO_CLUSTER = "KustoCluster"
+ KUSTO_DATABASE = "KustoDatabase"
+ SQL_DB_TABLE = "SqlDBTable"
+ SQL_DW_TABLE = "SqlDWTable"
+ SYNAPSE_WORKSPACE_SQL_POOL_TABLE = "SynapseWorkspaceSqlPoolTable"
+
+class DataSetMappingStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Gets the status of the data set mapping.
+ """
-class InvitationStatus(str, Enum):
+ OK = "Ok"
+ BROKEN = "Broken"
+
+class DataSetType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Type of data set
+ """
+
+ BLOB = "Blob"
+ CONTAINER = "Container"
+ BLOB_FOLDER = "BlobFolder"
+ ADLS_GEN2_FILE_SYSTEM = "AdlsGen2FileSystem"
+ ADLS_GEN2_FOLDER = "AdlsGen2Folder"
+ ADLS_GEN2_FILE = "AdlsGen2File"
+ ADLS_GEN1_FOLDER = "AdlsGen1Folder"
+ ADLS_GEN1_FILE = "AdlsGen1File"
+ KUSTO_CLUSTER = "KustoCluster"
+ KUSTO_DATABASE = "KustoDatabase"
+ SQL_DB_TABLE = "SqlDBTable"
+ SQL_DW_TABLE = "SqlDWTable"
+ SYNAPSE_WORKSPACE_SQL_POOL_TABLE = "SynapseWorkspaceSqlPoolTable"
+
+class InvitationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The status of the invitation.
"""
- pending = "Pending"
- accepted = "Accepted"
- rejected = "Rejected"
- withdrawn = "Withdrawn"
+ PENDING = "Pending"
+ ACCEPTED = "Accepted"
+ REJECTED = "Rejected"
+ WITHDRAWN = "Withdrawn"
-class Kind(str, Enum):
- """Kind of data set.
+class LastModifiedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of identity that last modified the resource.
"""
- blob = "Blob"
- container = "Container"
- blob_folder = "BlobFolder"
- adls_gen2_file_system = "AdlsGen2FileSystem"
- adls_gen2_folder = "AdlsGen2Folder"
- adls_gen2_file = "AdlsGen2File"
- adls_gen1_folder = "AdlsGen1Folder"
- adls_gen1_file = "AdlsGen1File"
- kusto_cluster = "KustoCluster"
- kusto_database = "KustoDatabase"
- sql_db_table = "SqlDBTable"
- sql_dw_table = "SqlDWTable"
+ USER = "User"
+ APPLICATION = "Application"
+ MANAGED_IDENTITY = "ManagedIdentity"
+ KEY = "Key"
-class SynchronizationKind(str, Enum):
- schedule_based = "ScheduleBased"
+class OutputType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Type of output file
+ """
-class ShareKind(str, Enum):
- """Share kind.
+ CSV = "Csv"
+ PARQUET = "Parquet"
+
+class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Provisioning state of the Account
"""
- copy_based = "CopyBased"
- in_place = "InPlace"
+ SUCCEEDED = "Succeeded"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ MOVING = "Moving"
+ FAILED = "Failed"
-class SynchronizationMode(str, Enum):
- """Synchronization mode
+class RecurrenceInterval(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Recurrence Interval
"""
- incremental = "Incremental"
- full_sync = "FullSync"
+ HOUR = "Hour"
+ DAY = "Day"
-class DataSetType(str, Enum):
- """Type of the data set
+class ShareKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Share kind.
"""
- blob = "Blob"
- container = "Container"
- blob_folder = "BlobFolder"
- adls_gen2_file_system = "AdlsGen2FileSystem"
- adls_gen2_folder = "AdlsGen2Folder"
- adls_gen2_file = "AdlsGen2File"
- adls_gen1_folder = "AdlsGen1Folder"
- adls_gen1_file = "AdlsGen1File"
- kusto_cluster = "KustoCluster"
- kusto_database = "KustoDatabase"
- sql_db_table = "SqlDBTable"
- sql_dw_table = "SqlDWTable"
+ COPY_BASED = "CopyBased"
+ IN_PLACE = "InPlace"
-class ShareSubscriptionStatus(str, Enum):
+class ShareSubscriptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Gets the status of share subscription
"""
- active = "Active"
- revoked = "Revoked"
- source_deleted = "SourceDeleted"
- revoking = "Revoking"
+ ACTIVE = "Active"
+ REVOKED = "Revoked"
+ SOURCE_DELETED = "SourceDeleted"
+ REVOKING = "Revoking"
-class Status(str, Enum):
+class SourceShareSynchronizationSettingKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Kind of synchronization setting on share.
+ """
+
+ SCHEDULE_BASED = "ScheduleBased"
+
+class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Operation state of the long running operation.
"""
- accepted = "Accepted"
- in_progress = "InProgress"
- transient_failure = "TransientFailure"
- succeeded = "Succeeded"
- failed = "Failed"
- canceled = "Canceled"
+ ACCEPTED = "Accepted"
+ IN_PROGRESS = "InProgress"
+ TRANSIENT_FAILURE = "TransientFailure"
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
-class RecurrenceInterval(str, Enum):
- """Recurrence Interval
+class SynchronizationMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Synchronization mode
"""
- hour = "Hour"
- day = "Day"
+ INCREMENTAL = "Incremental"
+ FULL_SYNC = "FullSync"
-class TriggerStatus(str, Enum):
- """Gets the trigger state
+class SynchronizationSettingKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Kind of synchronization setting.
"""
- active = "Active"
- inactive = "Inactive"
- source_synchronization_setting_deleted = "SourceSynchronizationSettingDeleted"
+ SCHEDULE_BASED = "ScheduleBased"
-class DataSetMappingStatus(str, Enum):
- """Gets the status of the data set mapping.
+class TriggerKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Kind of synchronization on trigger.
+ """
+
+ SCHEDULE_BASED = "ScheduleBased"
+
+class TriggerStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Gets the trigger state
"""
- ok = "Ok"
- broken = "Broken"
+ ACTIVE = "Active"
+ INACTIVE = "Inactive"
+ SOURCE_SYNCHRONIZATION_SETTING_DELETED = "SourceSynchronizationSettingDeleted"
-class OutputType(str, Enum):
- """File output type
+class Type(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Identity Type
"""
- csv = "Csv"
- parquet = "Parquet"
+ SYSTEM_ASSIGNED = "SystemAssigned"
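The regenerated enums switch to upper-cased members built on _CaseInsensitiveEnumMeta, so name-based lookups become case-insensitive while members still compare equal to their wire-format strings. A small sketch of what the metaclass defined above permits:

    from azext_datashare.vendored_sdks.datashare.models import DataSetKind

    # Members subclass str, so they compare equal to the service's wire values.
    assert DataSetKind.BLOB == "Blob"

    # _CaseInsensitiveEnumMeta makes name lookups case-insensitive, for both
    # item access and attribute access.
    assert DataSetKind["blob"] is DataSetKind.BLOB
    assert DataSetKind.blob is DataSetKind.BLOB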
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/models/_models.py b/src/datashare/azext_datashare/vendored_sdks/datashare/models/_models.py
index bee5d5e4df6..aec387e7dd1 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/models/_models.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/models/_models.py
@@ -10,35 +10,79 @@
import msrest.serialization
-class DefaultDto(msrest.serialization.Model):
+class ProxyDto(msrest.serialization.Model):
+ """Base data transfer object implementation for proxy resources.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource id of the azure resource.
+ :vartype id: str
+ :ivar name: Name of the azure resource.
+ :vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
+ :ivar type: Type of the azure resource.
+ :vartype type: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ProxyDto, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.system_data = None
+ self.type = None
+
+
+class DefaultDto(ProxyDto):
"""Base data transfer object implementation for default resources.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource id of the azure resource.
:vartype id: str
- :param location: Location of the azure resource.
- :type location: str
:ivar name: Name of the azure resource.
:vartype name: str
- :param tags: A set of tags. Tags on the azure resource.
- :type tags: dict[str, str]
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
+ :param location: Location of the azure resource.
+ :type location: str
+ :param tags: A set of tags. Tags on the azure resource.
+ :type tags: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
- 'location': {'key': 'location', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
- 'tags': {'key': 'tags', 'type': '{str}'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
@@ -46,11 +90,8 @@ def __init__(
**kwargs
):
super(DefaultDto, self).__init__(**kwargs)
- self.id = None
self.location = kwargs.get('location', None)
- self.name = None
self.tags = kwargs.get('tags', None)
- self.type = None
class Account(DefaultDto):
@@ -62,20 +103,22 @@ class Account(DefaultDto):
:ivar id: The resource id of the azure resource.
:vartype id: str
- :param location: Location of the azure resource.
- :type location: str
:ivar name: Name of the azure resource.
:vartype name: str
- :param tags: A set of tags. Tags on the azure resource.
- :type tags: dict[str, str]
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
+ :param location: Location of the azure resource.
+ :type location: str
+ :param tags: A set of tags. Tags on the azure resource.
+ :type tags: dict[str, str]
:param identity: Required. Identity Info on the Account.
:type identity: ~data_share_management_client.models.Identity
:ivar created_at: Time at which the account was created.
:vartype created_at: ~datetime.datetime
:ivar provisioning_state: Provisioning state of the Account. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:ivar user_email: Email of the user who created the resource.
:vartype user_email: str
@@ -86,6 +129,7 @@ class Account(DefaultDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'identity': {'required': True},
'created_at': {'readonly': True},
@@ -96,10 +140,11 @@ class Account(DefaultDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
- 'location': {'key': 'location', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
- 'tags': {'key': 'tags', 'type': '{str}'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
'identity': {'key': 'identity', 'type': 'Identity'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
@@ -112,7 +157,7 @@ def __init__(
**kwargs
):
super(Account, self).__init__(**kwargs)
- self.identity = kwargs.get('identity', None)
+ self.identity = kwargs['identity']
self.created_at = None
self.provisioning_state = None
self.user_email = None
@@ -145,7 +190,7 @@ def __init__(
):
super(AccountList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class AccountUpdateParameters(msrest.serialization.Model):
@@ -167,46 +212,11 @@ def __init__(
self.tags = kwargs.get('tags', None)
-class ProxyDto(msrest.serialization.Model):
- """Base data transfer object implementation for proxy resources.
-
- Variables are only populated by the server, and will be ignored when sending a request.
-
- :ivar id: The resource id of the azure resource.
- :vartype id: str
- :ivar name: Name of the azure resource.
- :vartype name: str
- :ivar type: Type of the azure resource.
- :vartype type: str
- """
-
- _validation = {
- 'id': {'readonly': True},
- 'name': {'readonly': True},
- 'type': {'readonly': True},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'str'},
- 'name': {'key': 'name', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(ProxyDto, self).__init__(**kwargs)
- self.id = None
- self.name = None
- self.type = None
-
-
class DataSet(ProxyDto):
"""A DataSet data transfer object.
You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: AdlsGen1FileDataSet, AdlsGen1FolderDataSet, AdlsGen2FileDataSet, AdlsGen2FileSystemDataSet, AdlsGen2FolderDataSet, BlobDataSet, BlobFolderDataSet, BlobContainerDataSet, KustoClusterDataSet, KustoDatabaseDataSet, SqlDBTableDataSet, SqlDwTableDataSet.
+ sub-classes are: AdlsGen1FileDataSet, AdlsGen1FolderDataSet, AdlsGen2FileDataSet, AdlsGen2FileSystemDataSet, AdlsGen2FolderDataSet, BlobDataSet, BlobFolderDataSet, BlobContainerDataSet, KustoClusterDataSet, KustoDatabaseDataSet, SqlDbTableDataSet, SqlDwTableDataSet, SynapseWorkspaceSqlPoolTableDataSet.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -216,18 +226,21 @@ class DataSet(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
}
@@ -235,12 +248,13 @@ class DataSet(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
}
_subtype_map = {
- 'kind': {'AdlsGen1File': 'AdlsGen1FileDataSet', 'AdlsGen1Folder': 'AdlsGen1FolderDataSet', 'AdlsGen2File': 'AdlsGen2FileDataSet', 'AdlsGen2FileSystem': 'AdlsGen2FileSystemDataSet', 'AdlsGen2Folder': 'AdlsGen2FolderDataSet', 'Blob': 'BlobDataSet', 'BlobFolder': 'BlobFolderDataSet', 'Container': 'BlobContainerDataSet', 'KustoCluster': 'KustoClusterDataSet', 'KustoDatabase': 'KustoDatabaseDataSet', 'SqlDBTable': 'SqlDBTableDataSet', 'SqlDWTable': 'SqlDwTableDataSet'}
+ 'kind': {'AdlsGen1File': 'AdlsGen1FileDataSet', 'AdlsGen1Folder': 'AdlsGen1FolderDataSet', 'AdlsGen2File': 'AdlsGen2FileDataSet', 'AdlsGen2FileSystem': 'AdlsGen2FileSystemDataSet', 'AdlsGen2Folder': 'AdlsGen2FolderDataSet', 'Blob': 'BlobDataSet', 'BlobFolder': 'BlobFolderDataSet', 'Container': 'BlobContainerDataSet', 'KustoCluster': 'KustoClusterDataSet', 'KustoDatabase': 'KustoDatabaseDataSet', 'SqlDBTable': 'SqlDbTableDataSet', 'SqlDWTable': 'SqlDwTableDataSet', 'SynapseWorkspaceSqlPoolTable': 'SynapseWorkspaceSqlPoolTableDataSet'}
}
def __init__(
@@ -248,7 +262,7 @@ def __init__(
**kwargs
):
super(DataSet, self).__init__(**kwargs)
- self.kind = 'DataSet'
+ self.kind = 'DataSet' # type: str
class AdlsGen1FileDataSet(DataSet):
@@ -262,13 +276,15 @@ class AdlsGen1FileDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param account_name: Required. The ADLS account name.
:type account_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -286,6 +302,7 @@ class AdlsGen1FileDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'account_name': {'required': True},
@@ -299,6 +316,7 @@ class AdlsGen1FileDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'account_name': {'key': 'properties.accountName', 'type': 'str'},
@@ -314,13 +332,13 @@ def __init__(
**kwargs
):
super(AdlsGen1FileDataSet, self).__init__(**kwargs)
- self.kind = 'AdlsGen1File'
- self.account_name = kwargs.get('account_name', None)
+ self.kind = 'AdlsGen1File' # type: str
+ self.account_name = kwargs['account_name']
self.data_set_id = None
- self.file_name = kwargs.get('file_name', None)
- self.folder_path = kwargs.get('folder_path', None)
- self.resource_group = kwargs.get('resource_group', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.file_name = kwargs['file_name']
+ self.folder_path = kwargs['folder_path']
+ self.resource_group = kwargs['resource_group']
+ self.subscription_id = kwargs['subscription_id']
class AdlsGen1FolderDataSet(DataSet):
@@ -334,13 +352,15 @@ class AdlsGen1FolderDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param account_name: Required. The ADLS account name.
:type account_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -356,6 +376,7 @@ class AdlsGen1FolderDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'account_name': {'required': True},
@@ -368,6 +389,7 @@ class AdlsGen1FolderDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'account_name': {'key': 'properties.accountName', 'type': 'str'},
@@ -382,12 +404,12 @@ def __init__(
**kwargs
):
super(AdlsGen1FolderDataSet, self).__init__(**kwargs)
- self.kind = 'AdlsGen1Folder'
- self.account_name = kwargs.get('account_name', None)
+ self.kind = 'AdlsGen1Folder' # type: str
+ self.account_name = kwargs['account_name']
self.data_set_id = None
- self.folder_path = kwargs.get('folder_path', None)
- self.resource_group = kwargs.get('resource_group', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.folder_path = kwargs['folder_path']
+ self.resource_group = kwargs['resource_group']
+ self.subscription_id = kwargs['subscription_id']
class AdlsGen2FileDataSet(DataSet):
@@ -401,13 +423,15 @@ class AdlsGen2FileDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param file_path: Required. File path within the file system.
@@ -425,6 +449,7 @@ class AdlsGen2FileDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -438,6 +463,7 @@ class AdlsGen2FileDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -453,20 +479,20 @@ def __init__(
**kwargs
):
super(AdlsGen2FileDataSet, self).__init__(**kwargs)
- self.kind = 'AdlsGen2File'
+ self.kind = 'AdlsGen2File' # type: str
self.data_set_id = None
- self.file_path = kwargs.get('file_path', None)
- self.file_system = kwargs.get('file_system', None)
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.file_path = kwargs['file_path']
+ self.file_system = kwargs['file_system']
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
class DataSetMapping(ProxyDto):
"""A data set mapping data transfer object.
You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: AdlsGen2FileDataSetMapping, AdlsGen2FileSystemDataSetMapping, AdlsGen2FolderDataSetMapping, BlobDataSetMapping, BlobFolderDataSetMapping, BlobContainerDataSetMapping, KustoClusterDataSetMapping, KustoDatabaseDataSetMapping, SqlDBTableDataSetMapping, SqlDwTableDataSetMapping.
+ sub-classes are: AdlsGen2FileDataSetMapping, AdlsGen2FileSystemDataSetMapping, AdlsGen2FolderDataSetMapping, BlobDataSetMapping, BlobFolderDataSetMapping, BlobContainerDataSetMapping, KustoClusterDataSetMapping, KustoDatabaseDataSetMapping, SqlDbTableDataSetMapping, SqlDwTableDataSetMapping, SynapseWorkspaceSqlPoolTableDataSetMapping.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -476,18 +502,21 @@ class DataSetMapping(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
}
@@ -495,12 +524,13 @@ class DataSetMapping(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
}
_subtype_map = {
- 'kind': {'AdlsGen2File': 'AdlsGen2FileDataSetMapping', 'AdlsGen2FileSystem': 'AdlsGen2FileSystemDataSetMapping', 'AdlsGen2Folder': 'AdlsGen2FolderDataSetMapping', 'Blob': 'BlobDataSetMapping', 'BlobFolder': 'BlobFolderDataSetMapping', 'Container': 'BlobContainerDataSetMapping', 'KustoCluster': 'KustoClusterDataSetMapping', 'KustoDatabase': 'KustoDatabaseDataSetMapping', 'SqlDBTable': 'SqlDBTableDataSetMapping', 'SqlDWTable': 'SqlDwTableDataSetMapping'}
+ 'kind': {'AdlsGen2File': 'AdlsGen2FileDataSetMapping', 'AdlsGen2FileSystem': 'AdlsGen2FileSystemDataSetMapping', 'AdlsGen2Folder': 'AdlsGen2FolderDataSetMapping', 'Blob': 'BlobDataSetMapping', 'BlobFolder': 'BlobFolderDataSetMapping', 'Container': 'BlobContainerDataSetMapping', 'KustoCluster': 'KustoClusterDataSetMapping', 'KustoDatabase': 'KustoDatabaseDataSetMapping', 'SqlDBTable': 'SqlDbTableDataSetMapping', 'SqlDWTable': 'SqlDwTableDataSetMapping', 'SynapseWorkspaceSqlPoolTable': 'SynapseWorkspaceSqlPoolTableDataSetMapping'}
}
def __init__(
@@ -508,7 +538,7 @@ def __init__(
**kwargs
):
super(DataSetMapping, self).__init__(**kwargs)
- self.kind = 'DataSetMapping'
+ self.kind = 'DataSetMapping' # type: str
class AdlsGen2FileDataSetMapping(DataSetMapping):
@@ -522,27 +552,29 @@ class AdlsGen2FileDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param file_path: Required. File path within the file system.
:type file_path: str
:param file_system: Required. File system to which the file belongs.
:type file_system: str
- :param output_type: Type of output file. Possible values include: 'Csv', 'Parquet'.
+ :param output_type: Type of output file. Possible values include: "Csv", "Parquet".
:type output_type: str or ~data_share_management_client.models.OutputType
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -555,6 +587,7 @@ class AdlsGen2FileDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -570,6 +603,7 @@ class AdlsGen2FileDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -588,16 +622,16 @@ def __init__(
**kwargs
):
super(AdlsGen2FileDataSetMapping, self).__init__(**kwargs)
- self.kind = 'AdlsGen2File'
- self.data_set_id = kwargs.get('data_set_id', None)
+ self.kind = 'AdlsGen2File' # type: str
+ self.data_set_id = kwargs['data_set_id']
self.data_set_mapping_status = None
- self.file_path = kwargs.get('file_path', None)
- self.file_system = kwargs.get('file_system', None)
+ self.file_path = kwargs['file_path']
+ self.file_system = kwargs['file_system']
self.output_type = kwargs.get('output_type', None)
self.provisioning_state = None
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
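Note that the regenerated constructor reads required properties with kwargs['...'] instead of kwargs.get(...), so a missing required argument now fails fast at construction time. An illustrative sketch (placeholder values, same assumed data_share_management_client.models import path):

from data_share_management_client import models

mapping = models.AdlsGen2FileDataSetMapping(
    data_set_id="00000000-0000-0000-0000-000000000000",
    file_path="folder/file.parquet",
    file_system="filesystem1",
    resource_group="sample-rg",
    storage_account_name="samplestorage",
    subscription_id="00000000-0000-0000-0000-000000000000",
    output_type="Parquet",  # still optional: read with kwargs.get
)

# Omitting a required property now raises KeyError instead of leaving the
# attribute silently set to None.
try:
    models.AdlsGen2FileDataSetMapping(file_path="folder/file.parquet")
except KeyError as missing:
    print(f"missing required argument: {missing}")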
class AdlsGen2FileSystemDataSet(DataSet):
@@ -611,13 +645,15 @@ class AdlsGen2FileSystemDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param file_system: Required. The file system name.
@@ -633,6 +669,7 @@ class AdlsGen2FileSystemDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -645,6 +682,7 @@ class AdlsGen2FileSystemDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -659,12 +697,12 @@ def __init__(
**kwargs
):
super(AdlsGen2FileSystemDataSet, self).__init__(**kwargs)
- self.kind = 'AdlsGen2FileSystem'
+ self.kind = 'AdlsGen2FileSystem' # type: str
self.data_set_id = None
- self.file_system = kwargs.get('file_system', None)
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.file_system = kwargs['file_system']
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
class AdlsGen2FileSystemDataSetMapping(DataSetMapping):
@@ -678,23 +716,25 @@ class AdlsGen2FileSystemDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param file_system: Required. The file system name.
:type file_system: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -707,6 +747,7 @@ class AdlsGen2FileSystemDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -721,6 +762,7 @@ class AdlsGen2FileSystemDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -737,14 +779,14 @@ def __init__(
**kwargs
):
super(AdlsGen2FileSystemDataSetMapping, self).__init__(**kwargs)
- self.kind = 'AdlsGen2FileSystem'
- self.data_set_id = kwargs.get('data_set_id', None)
+ self.kind = 'AdlsGen2FileSystem' # type: str
+ self.data_set_id = kwargs['data_set_id']
self.data_set_mapping_status = None
- self.file_system = kwargs.get('file_system', None)
+ self.file_system = kwargs['file_system']
self.provisioning_state = None
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
class AdlsGen2FolderDataSet(DataSet):
@@ -758,13 +800,15 @@ class AdlsGen2FolderDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param file_system: Required. File system to which the folder belongs.
@@ -782,6 +826,7 @@ class AdlsGen2FolderDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -795,6 +840,7 @@ class AdlsGen2FolderDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -810,13 +856,13 @@ def __init__(
**kwargs
):
super(AdlsGen2FolderDataSet, self).__init__(**kwargs)
- self.kind = 'AdlsGen2Folder'
+ self.kind = 'AdlsGen2Folder' # type: str
self.data_set_id = None
- self.file_system = kwargs.get('file_system', None)
- self.folder_path = kwargs.get('folder_path', None)
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.file_system = kwargs['file_system']
+ self.folder_path = kwargs['folder_path']
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
class AdlsGen2FolderDataSetMapping(DataSetMapping):
@@ -830,17 +876,19 @@ class AdlsGen2FolderDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param file_system: Required. File system to which the folder belongs.
@@ -848,7 +896,7 @@ class AdlsGen2FolderDataSetMapping(DataSetMapping):
:param folder_path: Required. Folder path within the file system.
:type folder_path: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -861,6 +909,7 @@ class AdlsGen2FolderDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -876,6 +925,7 @@ class AdlsGen2FolderDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -893,15 +943,15 @@ def __init__(
**kwargs
):
super(AdlsGen2FolderDataSetMapping, self).__init__(**kwargs)
- self.kind = 'AdlsGen2Folder'
- self.data_set_id = kwargs.get('data_set_id', None)
+ self.kind = 'AdlsGen2Folder' # type: str
+ self.data_set_id = kwargs['data_set_id']
self.data_set_mapping_status = None
- self.file_system = kwargs.get('file_system', None)
- self.folder_path = kwargs.get('folder_path', None)
+ self.file_system = kwargs['file_system']
+ self.folder_path = kwargs['folder_path']
self.provisioning_state = None
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
class BlobContainerDataSet(DataSet):
@@ -915,13 +965,15 @@ class BlobContainerDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param container_name: Required. BLOB Container name.
:type container_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -937,6 +989,7 @@ class BlobContainerDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -949,6 +1002,7 @@ class BlobContainerDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -963,12 +1017,12 @@ def __init__(
**kwargs
):
super(BlobContainerDataSet, self).__init__(**kwargs)
- self.kind = 'Container'
- self.container_name = kwargs.get('container_name', None)
+ self.kind = 'Container' # type: str
+ self.container_name = kwargs['container_name']
self.data_set_id = None
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
class BlobContainerDataSetMapping(DataSetMapping):
@@ -982,23 +1036,25 @@ class BlobContainerDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param container_name: Required. BLOB Container name.
:type container_name: str
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -1011,6 +1067,7 @@ class BlobContainerDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -1025,6 +1082,7 @@ class BlobContainerDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -1041,14 +1099,14 @@ def __init__(
**kwargs
):
super(BlobContainerDataSetMapping, self).__init__(**kwargs)
- self.kind = 'Container'
- self.container_name = kwargs.get('container_name', None)
- self.data_set_id = kwargs.get('data_set_id', None)
+ self.kind = 'Container' # type: str
+ self.container_name = kwargs['container_name']
+ self.data_set_id = kwargs['data_set_id']
self.data_set_mapping_status = None
self.provisioning_state = None
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
class BlobDataSet(DataSet):
@@ -1062,13 +1120,15 @@ class BlobDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param container_name: Required. Container that has the file path.
:type container_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -1086,6 +1146,7 @@ class BlobDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -1099,6 +1160,7 @@ class BlobDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -1114,13 +1176,13 @@ def __init__(
**kwargs
):
super(BlobDataSet, self).__init__(**kwargs)
- self.kind = 'Blob'
- self.container_name = kwargs.get('container_name', None)
+ self.kind = 'Blob' # type: str
+ self.container_name = kwargs['container_name']
self.data_set_id = None
- self.file_path = kwargs.get('file_path', None)
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.file_path = kwargs['file_path']
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
class BlobDataSetMapping(DataSetMapping):
@@ -1134,27 +1196,29 @@ class BlobDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param container_name: Required. Container that has the file path.
:type container_name: str
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param file_path: Required. File path within the source data set.
:type file_path: str
- :param output_type: File output type. Possible values include: 'Csv', 'Parquet'.
+ :param output_type: File output type. Possible values include: "Csv", "Parquet".
:type output_type: str or ~data_share_management_client.models.OutputType
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -1167,6 +1231,7 @@ class BlobDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -1182,6 +1247,7 @@ class BlobDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -1200,16 +1266,16 @@ def __init__(
**kwargs
):
super(BlobDataSetMapping, self).__init__(**kwargs)
- self.kind = 'Blob'
- self.container_name = kwargs.get('container_name', None)
- self.data_set_id = kwargs.get('data_set_id', None)
+ self.kind = 'Blob' # type: str
+ self.container_name = kwargs['container_name']
+ self.data_set_id = kwargs['data_set_id']
self.data_set_mapping_status = None
- self.file_path = kwargs.get('file_path', None)
+ self.file_path = kwargs['file_path']
self.output_type = kwargs.get('output_type', None)
self.provisioning_state = None
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
class BlobFolderDataSet(DataSet):
@@ -1223,13 +1289,15 @@ class BlobFolderDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param container_name: Required. Container that has the file path.
:type container_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -1247,6 +1315,7 @@ class BlobFolderDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -1260,6 +1329,7 @@ class BlobFolderDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -1275,13 +1345,13 @@ def __init__(
**kwargs
):
super(BlobFolderDataSet, self).__init__(**kwargs)
- self.kind = 'BlobFolder'
- self.container_name = kwargs.get('container_name', None)
+ self.kind = 'BlobFolder' # type: str
+ self.container_name = kwargs['container_name']
self.data_set_id = None
- self.prefix = kwargs.get('prefix', None)
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.prefix = kwargs['prefix']
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
class BlobFolderDataSetMapping(DataSetMapping):
@@ -1295,25 +1365,27 @@ class BlobFolderDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param container_name: Required. Container that has the file path.
:type container_name: str
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param prefix: Required. Prefix for blob folder.
:type prefix: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -1326,6 +1398,7 @@ class BlobFolderDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -1341,6 +1414,7 @@ class BlobFolderDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -1358,15 +1432,15 @@ def __init__(
**kwargs
):
super(BlobFolderDataSetMapping, self).__init__(**kwargs)
- self.kind = 'BlobFolder'
- self.container_name = kwargs.get('container_name', None)
- self.data_set_id = kwargs.get('data_set_id', None)
+ self.kind = 'BlobFolder' # type: str
+ self.container_name = kwargs['container_name']
+ self.data_set_id = kwargs['data_set_id']
self.data_set_mapping_status = None
- self.prefix = kwargs.get('prefix', None)
+ self.prefix = kwargs['prefix']
self.provisioning_state = None
- self.resource_group = kwargs.get('resource_group', None)
- self.storage_account_name = kwargs.get('storage_account_name', None)
- self.subscription_id = kwargs.get('subscription_id', None)
+ self.resource_group = kwargs['resource_group']
+ self.storage_account_name = kwargs['storage_account_name']
+ self.subscription_id = kwargs['subscription_id']
class ConsumerInvitation(ProxyDto):
@@ -1380,16 +1454,21 @@ class ConsumerInvitation(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:ivar data_set_count: Number of data sets in a share.
:vartype data_set_count: int
:ivar description: Description shared when the invitation was created.
:vartype description: str
+ :ivar expiration_date: The expiration date for the share subscription created by accepting the
+ invitation.
+ :vartype expiration_date: ~datetime.datetime
:param invitation_id: Required. Unique id of the invitation.
:type invitation_id: str
- :ivar invitation_status: The status of the invitation. Possible values include: 'Pending',
- 'Accepted', 'Rejected', 'Withdrawn'.
+ :ivar invitation_status: The status of the invitation. Possible values include: "Pending",
+ "Accepted", "Rejected", "Withdrawn".
:vartype invitation_status: str or ~data_share_management_client.models.InvitationStatus
:ivar location: invitation location.
:vartype location: str
@@ -1416,9 +1495,11 @@ class ConsumerInvitation(ProxyDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'data_set_count': {'readonly': True},
'description': {'readonly': True},
+ 'expiration_date': {'readonly': True},
'invitation_id': {'required': True},
'invitation_status': {'readonly': True},
'location': {'readonly': True},
@@ -1436,9 +1517,11 @@ class ConsumerInvitation(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'data_set_count': {'key': 'properties.dataSetCount', 'type': 'int'},
'description': {'key': 'properties.description', 'type': 'str'},
+ 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'},
'invitation_id': {'key': 'properties.invitationId', 'type': 'str'},
'invitation_status': {'key': 'properties.invitationStatus', 'type': 'str'},
'location': {'key': 'properties.location', 'type': 'str'},
@@ -1460,7 +1543,8 @@ def __init__(
super(ConsumerInvitation, self).__init__(**kwargs)
self.data_set_count = None
self.description = None
- self.invitation_id = kwargs.get('invitation_id', None)
+ self.expiration_date = None
+ self.invitation_id = kwargs['invitation_id']
self.invitation_status = None
self.location = None
self.provider_email = None
@@ -1500,7 +1584,7 @@ def __init__(
):
super(ConsumerInvitationList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class ConsumerSourceDataSet(ProxyDto):
@@ -1512,6 +1596,8 @@ class ConsumerSourceDataSet(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:ivar data_set_id: DataSet Id.
@@ -1522,15 +1608,17 @@ class ConsumerSourceDataSet(ProxyDto):
:vartype data_set_name: str
:ivar data_set_path: DataSet path.
:vartype data_set_path: str
- :ivar data_set_type: Type of data set. Possible values include: 'Blob', 'Container',
- 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File', 'AdlsGen1Folder',
- 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable'.
+ :ivar data_set_type: Type of data set. Possible values include: "Blob", "Container",
+ "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder",
+ "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
:vartype data_set_type: str or ~data_share_management_client.models.DataSetType
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'data_set_id': {'readonly': True},
'data_set_location': {'readonly': True},
@@ -1542,6 +1630,7 @@ class ConsumerSourceDataSet(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
'data_set_location': {'key': 'properties.dataSetLocation', 'type': 'str'},
@@ -1588,7 +1677,7 @@ def __init__(
):
super(ConsumerSourceDataSetList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class DataSetList(msrest.serialization.Model):
@@ -1617,7 +1706,7 @@ def __init__(
):
super(DataSetList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class DataSetMappingList(msrest.serialization.Model):
@@ -1646,7 +1735,7 @@ def __init__(
):
super(DataSetMappingList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class DataShareError(msrest.serialization.Model):
@@ -1671,7 +1760,7 @@ def __init__(
**kwargs
):
super(DataShareError, self).__init__(**kwargs)
- self.error = kwargs.get('error', None)
+ self.error = kwargs['error']
class DataShareErrorInfo(msrest.serialization.Model):
@@ -1706,9 +1795,9 @@ def __init__(
**kwargs
):
super(DataShareErrorInfo, self).__init__(**kwargs)
- self.code = kwargs.get('code', None)
+ self.code = kwargs['code']
self.details = kwargs.get('details', None)
- self.message = kwargs.get('message', None)
+ self.message = kwargs['message']
self.target = kwargs.get('target', None)
@@ -1744,14 +1833,13 @@ class Identity(msrest.serialization.Model):
:vartype principal_id: str
:ivar tenant_id: Tenant Id.
:vartype tenant_id: str
- :ivar type: Identity Type. Default value: "SystemAssigned".
- :vartype type: str
+ :param type: Identity Type. Possible values include: "SystemAssigned".
+ :type type: str or ~data_share_management_client.models.Type
"""
_validation = {
'principal_id': {'readonly': True},
'tenant_id': {'readonly': True},
- 'type': {'constant': True},
}
_attribute_map = {
@@ -1760,8 +1848,6 @@ class Identity(msrest.serialization.Model):
'type': {'key': 'type', 'type': 'str'},
}
- type = "SystemAssigned"
-
def __init__(
self,
**kwargs
@@ -1769,6 +1855,7 @@ def __init__(
super(Identity, self).__init__(**kwargs)
self.principal_id = None
self.tenant_id = None
+ self.type = kwargs.get('type', None)
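With the class-level constant removed, Identity.type becomes an ordinary parameter of the Type enum, so callers opt in to a system-assigned identity explicitly. A small illustrative sketch (same assumed import path; principal_id and tenant_id remain read-only and are populated by the service):

from data_share_management_client import models

identity = models.Identity(type="SystemAssigned")
assert identity.type == "SystemAssigned"
assert identity.principal_id is None  # filled in by the server on responses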
class Invitation(ProxyDto):
@@ -1780,12 +1867,16 @@ class Invitation(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
+ :param expiration_date: The expiration date for the invitation and share subscription.
+ :type expiration_date: ~datetime.datetime
:ivar invitation_id: unique invitation id.
:vartype invitation_id: str
- :ivar invitation_status: The status of the invitation. Possible values include: 'Pending',
- 'Accepted', 'Rejected', 'Withdrawn'.
+ :ivar invitation_status: The status of the invitation. Possible values include: "Pending",
+ "Accepted", "Rejected", "Withdrawn".
:vartype invitation_status: str or ~data_share_management_client.models.InvitationStatus
:ivar responded_at: The time the recipient responded to the invitation.
:vartype responded_at: ~datetime.datetime
@@ -1808,6 +1899,7 @@ class Invitation(ProxyDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'invitation_id': {'readonly': True},
'invitation_status': {'readonly': True},
@@ -1820,7 +1912,9 @@ class Invitation(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
+ 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'},
'invitation_id': {'key': 'properties.invitationId', 'type': 'str'},
'invitation_status': {'key': 'properties.invitationStatus', 'type': 'str'},
'responded_at': {'key': 'properties.respondedAt', 'type': 'iso-8601'},
@@ -1837,6 +1931,7 @@ def __init__(
**kwargs
):
super(Invitation, self).__init__(**kwargs)
+ self.expiration_date = kwargs.get('expiration_date', None)
self.invitation_id = None
self.invitation_status = None
self.responded_at = None
@@ -1874,7 +1969,7 @@ def __init__(
):
super(InvitationList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class KustoClusterDataSet(DataSet):
@@ -1888,13 +1983,15 @@ class KustoClusterDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param kusto_cluster_resource_id: Required. Resource id of the kusto cluster.
@@ -1902,13 +1999,14 @@ class KustoClusterDataSet(DataSet):
:ivar location: Location of the kusto cluster.
:vartype location: str
:ivar provisioning_state: Provisioning state of the kusto cluster data set. Possible values
- include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ include: "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -1920,6 +2018,7 @@ class KustoClusterDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -1933,9 +2032,9 @@ def __init__(
**kwargs
):
super(KustoClusterDataSet, self).__init__(**kwargs)
- self.kind = 'KustoCluster'
+ self.kind = 'KustoCluster' # type: str
self.data_set_id = None
- self.kusto_cluster_resource_id = kwargs.get('kusto_cluster_resource_id', None)
+ self.kusto_cluster_resource_id = kwargs['kusto_cluster_resource_id']
self.location = None
self.provisioning_state = None
@@ -1951,17 +2050,19 @@ class KustoClusterDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param kusto_cluster_resource_id: Required. Resource id of the sink kusto cluster.
@@ -1969,13 +2070,14 @@ class KustoClusterDataSetMapping(DataSetMapping):
:ivar location: Location of the sink kusto cluster.
:vartype location: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -1988,6 +2090,7 @@ class KustoClusterDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -2002,10 +2105,10 @@ def __init__(
**kwargs
):
super(KustoClusterDataSetMapping, self).__init__(**kwargs)
- self.kind = 'KustoCluster'
- self.data_set_id = kwargs.get('data_set_id', None)
+ self.kind = 'KustoCluster' # type: str
+ self.data_set_id = kwargs['data_set_id']
self.data_set_mapping_status = None
- self.kusto_cluster_resource_id = kwargs.get('kusto_cluster_resource_id', None)
+ self.kusto_cluster_resource_id = kwargs['kusto_cluster_resource_id']
self.location = None
self.provisioning_state = None
@@ -2021,13 +2124,15 @@ class KustoDatabaseDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param kusto_database_resource_id: Required. Resource id of the kusto database.
@@ -2035,13 +2140,14 @@ class KustoDatabaseDataSet(DataSet):
:ivar location: Location of the kusto cluster.
:vartype location: str
:ivar provisioning_state: Provisioning state of the kusto database data set. Possible values
- include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ include: "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -2053,6 +2159,7 @@ class KustoDatabaseDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -2066,9 +2173,9 @@ def __init__(
**kwargs
):
super(KustoDatabaseDataSet, self).__init__(**kwargs)
- self.kind = 'KustoDatabase'
+ self.kind = 'KustoDatabase' # type: str
self.data_set_id = None
- self.kusto_database_resource_id = kwargs.get('kusto_database_resource_id', None)
+ self.kusto_database_resource_id = kwargs['kusto_database_resource_id']
self.location = None
self.provisioning_state = None
@@ -2084,17 +2191,19 @@ class KustoDatabaseDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param kusto_cluster_resource_id: Required. Resource id of the sink kusto cluster.
@@ -2102,13 +2211,14 @@ class KustoDatabaseDataSetMapping(DataSetMapping):
:ivar location: Location of the sink kusto cluster.
:vartype location: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -2121,6 +2231,7 @@ class KustoDatabaseDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -2135,10 +2246,10 @@ def __init__(
**kwargs
):
super(KustoDatabaseDataSetMapping, self).__init__(**kwargs)
- self.kind = 'KustoDatabase'
- self.data_set_id = kwargs.get('data_set_id', None)
+ self.kind = 'KustoDatabase' # type: str
+ self.data_set_id = kwargs['data_set_id']
self.data_set_mapping_status = None
- self.kusto_cluster_resource_id = kwargs.get('kusto_cluster_resource_id', None)
+ self.kusto_cluster_resource_id = kwargs['kusto_cluster_resource_id']
self.location = None
self.provisioning_state = None
@@ -2169,7 +2280,7 @@ def __init__(
):
super(OperationList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class OperationMetaLogSpecification(msrest.serialization.Model):
@@ -2212,6 +2323,8 @@ class OperationMetaMetricSpecification(msrest.serialization.Model):
:type display_name: str
:param enable_regional_mdm_account: enable regional mdm account.
:type enable_regional_mdm_account: str
+ :param fill_gap_with_zero: fill gap with zero.
+ :type fill_gap_with_zero: bool
:param internal_metric_name: internal metric name.
:type internal_metric_name: str
:param name: name of the metric.
@@ -2233,6 +2346,7 @@ class OperationMetaMetricSpecification(msrest.serialization.Model):
'display_description': {'key': 'displayDescription', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'},
+ 'fill_gap_with_zero': {'key': 'fillGapWithZero', 'type': 'bool'},
'internal_metric_name': {'key': 'internalMetricName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'resource_id_dimension_name_override': {'key': 'resourceIdDimensionNameOverride', 'type': 'str'},
@@ -2251,6 +2365,7 @@ def __init__(
self.display_description = kwargs.get('display_description', None)
self.display_name = kwargs.get('display_name', None)
self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None)
+ self.fill_gap_with_zero = kwargs.get('fill_gap_with_zero', None)
self.internal_metric_name = kwargs.get('internal_metric_name', None)
self.name = kwargs.get('name', None)
self.resource_id_dimension_name_override = kwargs.get('resource_id_dimension_name_override', None)
@@ -2359,7 +2474,7 @@ class OperationResponse(msrest.serialization.Model):
:param start_time: start time.
:type start_time: ~datetime.datetime
:param status: Required. Operation state of the long running operation. Possible values
- include: 'Accepted', 'InProgress', 'TransientFailure', 'Succeeded', 'Failed', 'Canceled'.
+ include: "Accepted", "InProgress", "TransientFailure", "Succeeded", "Failed", "Canceled".
:type status: str or ~data_share_management_client.models.Status
"""
@@ -2382,7 +2497,7 @@ def __init__(
self.end_time = kwargs.get('end_time', None)
self.error = kwargs.get('error', None)
self.start_time = kwargs.get('start_time', None)
- self.status = kwargs.get('status', None)
+ self.status = kwargs['status']
class ProviderShareSubscription(ProxyDto):
@@ -2394,6 +2509,8 @@ class ProviderShareSubscription(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:ivar consumer_email: Email of the consumer who created the share subscription.
@@ -2404,6 +2521,8 @@ class ProviderShareSubscription(ProxyDto):
:vartype consumer_tenant_name: str
:ivar created_at: created at.
:vartype created_at: ~datetime.datetime
+ :param expiration_date: Expiration date of the share subscription in UTC format.
+ :type expiration_date: ~datetime.datetime
:ivar provider_email: Email of the provider who created the share.
:vartype provider_email: str
:ivar provider_name: Name of the provider who created the share.
@@ -2413,7 +2532,7 @@ class ProviderShareSubscription(ProxyDto):
:ivar share_subscription_object_id: share Subscription Object Id.
:vartype share_subscription_object_id: str
:ivar share_subscription_status: Gets the status of share subscription. Possible values
- include: 'Active', 'Revoked', 'SourceDeleted', 'Revoking'.
+ include: "Active", "Revoked", "SourceDeleted", "Revoking".
:vartype share_subscription_status: str or
~data_share_management_client.models.ShareSubscriptionStatus
"""
@@ -2421,6 +2540,7 @@ class ProviderShareSubscription(ProxyDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'consumer_email': {'readonly': True},
'consumer_name': {'readonly': True},
@@ -2436,11 +2556,13 @@ class ProviderShareSubscription(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'consumer_email': {'key': 'properties.consumerEmail', 'type': 'str'},
'consumer_name': {'key': 'properties.consumerName', 'type': 'str'},
'consumer_tenant_name': {'key': 'properties.consumerTenantName', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
+ 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'},
'provider_email': {'key': 'properties.providerEmail', 'type': 'str'},
'provider_name': {'key': 'properties.providerName', 'type': 'str'},
'shared_at': {'key': 'properties.sharedAt', 'type': 'iso-8601'},
@@ -2457,6 +2579,7 @@ def __init__(
self.consumer_name = None
self.consumer_tenant_name = None
self.created_at = None
+ self.expiration_date = kwargs.get('expiration_date', None)
self.provider_email = None
self.provider_name = None
self.shared_at = None
@@ -2490,7 +2613,7 @@ def __init__(
):
super(ProviderShareSubscriptionList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class SourceShareSynchronizationSetting(msrest.serialization.Model):
@@ -2501,11 +2624,9 @@ class SourceShareSynchronizationSetting(msrest.serialization.Model):
All required parameters must be populated in order to send to Azure.
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ :param kind: Required. Kind of synchronization setting on share. Constant filled by server.
+ Possible values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.SourceShareSynchronizationSettingKind
"""
_validation = {
@@ -2525,7 +2646,7 @@ def __init__(
**kwargs
):
super(SourceShareSynchronizationSetting, self).__init__(**kwargs)
- self.kind = None
+ self.kind = None # type: Optional[str]
class ScheduledSourceSynchronizationSetting(SourceShareSynchronizationSetting):
@@ -2533,12 +2654,10 @@ class ScheduledSourceSynchronizationSetting(SourceShareSynchronizationSetting):
All required parameters must be populated in order to send to Azure.
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
- :param recurrence_interval: Recurrence Interval. Possible values include: 'Hour', 'Day'.
+ :param kind: Required. Kind of synchronization setting on share. Constant filled by server.
+ Possible values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.SourceShareSynchronizationSettingKind
+ :param recurrence_interval: Recurrence Interval. Possible values include: "Hour", "Day".
:type recurrence_interval: str or ~data_share_management_client.models.RecurrenceInterval
:param synchronization_time: Synchronization time.
:type synchronization_time: ~datetime.datetime
@@ -2559,7 +2678,7 @@ def __init__(
**kwargs
):
super(ScheduledSourceSynchronizationSetting, self).__init__(**kwargs)
- self.kind = 'ScheduleBased'
+ self.kind = 'ScheduleBased' # type: str
self.recurrence_interval = kwargs.get('recurrence_interval', None)
self.synchronization_time = kwargs.get('synchronization_time', None)
@@ -2578,18 +2697,19 @@ class SynchronizationSetting(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ :param kind: Required. Kind of synchronization setting. Constant filled by server. Possible
+ values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.SynchronizationSettingKind
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
}
@@ -2597,6 +2717,7 @@ class SynchronizationSetting(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
}
@@ -2610,7 +2731,7 @@ def __init__(
**kwargs
):
super(SynchronizationSetting, self).__init__(**kwargs)
- self.kind = 'SynchronizationSetting'
+ self.kind = 'SynchronizationSetting' # type: str
class ScheduledSynchronizationSetting(SynchronizationSetting):
@@ -2624,20 +2745,20 @@ class ScheduledSynchronizationSetting(SynchronizationSetting):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ :param kind: Required. Kind of synchronization setting. Constant filled by server. Possible
+ values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.SynchronizationSettingKind
:ivar created_at: Time at which the synchronization setting was created.
:vartype created_at: ~datetime.datetime
:ivar provisioning_state: Gets or sets the provisioning state. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
- :param recurrence_interval: Required. Recurrence Interval. Possible values include: 'Hour',
- 'Day'.
+ :param recurrence_interval: Required. Recurrence Interval. Possible values include: "Hour",
+ "Day".
:type recurrence_interval: str or ~data_share_management_client.models.RecurrenceInterval
:param synchronization_time: Required. Synchronization time.
:type synchronization_time: ~datetime.datetime
@@ -2648,6 +2769,7 @@ class ScheduledSynchronizationSetting(SynchronizationSetting):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'created_at': {'readonly': True},
@@ -2660,6 +2782,7 @@ class ScheduledSynchronizationSetting(SynchronizationSetting):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
@@ -2674,11 +2797,11 @@ def __init__(
**kwargs
):
super(ScheduledSynchronizationSetting, self).__init__(**kwargs)
- self.kind = 'ScheduleBased'
+ self.kind = 'ScheduleBased' # type: str
self.created_at = None
self.provisioning_state = None
- self.recurrence_interval = kwargs.get('recurrence_interval', None)
- self.synchronization_time = kwargs.get('synchronization_time', None)
+ self.recurrence_interval = kwargs['recurrence_interval']
+ self.synchronization_time = kwargs['synchronization_time']
self.user_name = None
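# Illustrative sketch: recurrence_interval and synchronization_time are now
# required when building a ScheduledSynchronizationSetting; omitting either
# raises KeyError (import path assumed from this diff's layout).
from datetime import datetime
from azext_datashare.vendored_sdks.datashare.models._models import (
    ScheduledSynchronizationSetting,
)

setting = ScheduledSynchronizationSetting(
    recurrence_interval='Day',
    synchronization_time=datetime(2021, 1, 1, 8, 0, 0),
)
print(setting.kind)  # 'ScheduleBased' is fixed by the subclass constructor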
@@ -2696,18 +2819,19 @@ class Trigger(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ :param kind: Required. Kind of synchronization on trigger. Constant filled by server. Possible
+ values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.TriggerKind
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
}
@@ -2715,6 +2839,7 @@ class Trigger(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
}
@@ -2728,7 +2853,7 @@ def __init__(
**kwargs
):
super(Trigger, self).__init__(**kwargs)
- self.kind = 'Trigger'
+ self.kind = 'Trigger' # type: str
class ScheduledTrigger(Trigger):
@@ -2742,28 +2867,28 @@ class ScheduledTrigger(Trigger):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ :param kind: Required. Kind of synchronization on trigger. Constant filled by server. Possible
+ values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.TriggerKind
:ivar created_at: Time at which the trigger was created.
:vartype created_at: ~datetime.datetime
- :ivar provisioning_state: Gets the provisioning state. Possible values include: 'Succeeded',
- 'Creating', 'Deleting', 'Moving', 'Failed'.
+ :ivar provisioning_state: Gets the provisioning state. Possible values include: "Succeeded",
+ "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
- :param recurrence_interval: Required. Recurrence Interval. Possible values include: 'Hour',
- 'Day'.
+ :param recurrence_interval: Required. Recurrence Interval. Possible values include: "Hour",
+ "Day".
:type recurrence_interval: str or ~data_share_management_client.models.RecurrenceInterval
- :param synchronization_mode: Synchronization mode. Possible values include: 'Incremental',
- 'FullSync'.
+ :param synchronization_mode: Synchronization mode. Possible values include: "Incremental",
+ "FullSync".
:type synchronization_mode: str or ~data_share_management_client.models.SynchronizationMode
:param synchronization_time: Required. Synchronization time.
:type synchronization_time: ~datetime.datetime
- :ivar trigger_status: Gets the trigger state. Possible values include: 'Active', 'Inactive',
- 'SourceSynchronizationSettingDeleted'.
+ :ivar trigger_status: Gets the trigger state. Possible values include: "Active", "Inactive",
+ "SourceSynchronizationSettingDeleted".
:vartype trigger_status: str or ~data_share_management_client.models.TriggerStatus
:ivar user_name: Name of the user who created the trigger.
:vartype user_name: str
@@ -2772,6 +2897,7 @@ class ScheduledTrigger(Trigger):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'created_at': {'readonly': True},
@@ -2785,6 +2911,7 @@ class ScheduledTrigger(Trigger):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
@@ -2801,12 +2928,12 @@ def __init__(
**kwargs
):
super(ScheduledTrigger, self).__init__(**kwargs)
- self.kind = 'ScheduleBased'
+ self.kind = 'ScheduleBased' # type: str
self.created_at = None
self.provisioning_state = None
- self.recurrence_interval = kwargs.get('recurrence_interval', None)
+ self.recurrence_interval = kwargs['recurrence_interval']
self.synchronization_mode = kwargs.get('synchronization_mode', None)
- self.synchronization_time = kwargs.get('synchronization_time', None)
+ self.synchronization_time = kwargs['synchronization_time']
self.trigger_status = None
self.user_name = None
@@ -2820,6 +2947,8 @@ class Share(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:ivar created_at: Time at which the share was created.
@@ -2827,9 +2956,9 @@ class Share(ProxyDto):
:param description: Share description.
:type description: str
:ivar provisioning_state: Gets or sets the provisioning state. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
- :param share_kind: Share kind. Possible values include: 'CopyBased', 'InPlace'.
+ :param share_kind: Share kind. Possible values include: "CopyBased", "InPlace".
:type share_kind: str or ~data_share_management_client.models.ShareKind
:param terms: Share terms.
:type terms: str
@@ -2842,6 +2971,7 @@ class Share(ProxyDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'created_at': {'readonly': True},
'provisioning_state': {'readonly': True},
@@ -2852,6 +2982,7 @@ class Share(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
'description': {'key': 'properties.description', 'type': 'str'},
@@ -2902,7 +3033,7 @@ def __init__(
):
super(ShareList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class ShareSubscription(ProxyDto):
@@ -2916,10 +3047,14 @@ class ShareSubscription(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:ivar created_at: Time at which the share subscription was created.
:vartype created_at: ~datetime.datetime
+ :param expiration_date: The expiration date of the share subscription.
+ :type expiration_date: ~datetime.datetime
:param invitation_id: Required. The invitation id.
:type invitation_id: str
:ivar provider_email: Email of the provider who created the resource.
@@ -2929,16 +3064,16 @@ class ShareSubscription(ProxyDto):
:ivar provider_tenant_name: Tenant name of the provider who created the resource.
:vartype provider_tenant_name: str
:ivar provisioning_state: Provisioning state of the share subscription. Possible values
- include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ include: "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:ivar share_description: Description of share.
:vartype share_description: str
- :ivar share_kind: Kind of share. Possible values include: 'CopyBased', 'InPlace'.
+ :ivar share_kind: Kind of share. Possible values include: "CopyBased", "InPlace".
:vartype share_kind: str or ~data_share_management_client.models.ShareKind
:ivar share_name: Name of the share.
:vartype share_name: str
:ivar share_subscription_status: Gets the current status of share subscription. Possible values
- include: 'Active', 'Revoked', 'SourceDeleted', 'Revoking'.
+ include: "Active", "Revoked", "SourceDeleted", "Revoking".
:vartype share_subscription_status: str or
~data_share_management_client.models.ShareSubscriptionStatus
:ivar share_terms: Terms of a share.
@@ -2954,6 +3089,7 @@ class ShareSubscription(ProxyDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'created_at': {'readonly': True},
'invitation_id': {'required': True},
@@ -2974,8 +3110,10 @@ class ShareSubscription(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
+ 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'},
'invitation_id': {'key': 'properties.invitationId', 'type': 'str'},
'provider_email': {'key': 'properties.providerEmail', 'type': 'str'},
'provider_name': {'key': 'properties.providerName', 'type': 'str'},
@@ -2997,7 +3135,8 @@ def __init__(
):
super(ShareSubscription, self).__init__(**kwargs)
self.created_at = None
- self.invitation_id = kwargs.get('invitation_id', None)
+ self.expiration_date = kwargs.get('expiration_date', None)
+ self.invitation_id = kwargs['invitation_id']
self.provider_email = None
self.provider_name = None
self.provider_tenant_name = None
@@ -3007,7 +3146,7 @@ def __init__(
self.share_name = None
self.share_subscription_status = None
self.share_terms = None
- self.source_share_location = kwargs.get('source_share_location', None)
+ self.source_share_location = kwargs['source_share_location']
self.user_email = None
self.user_name = None
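# Illustrative sketch: ShareSubscription gains an optional expiration_date, while
# invitation_id and source_share_location become required constructor arguments.
# The values below are placeholders.
from datetime import datetime
from azext_datashare.vendored_sdks.datashare.models._models import ShareSubscription

subscription = ShareSubscription(
    invitation_id='00000000-0000-0000-0000-000000000000',
    source_share_location='eastus',
    expiration_date=datetime(2021, 12, 31),  # optional, new in this change
)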
@@ -3038,7 +3177,7 @@ def __init__(
):
super(ShareSubscriptionList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class ShareSubscriptionSynchronization(msrest.serialization.Model):
@@ -3060,8 +3199,8 @@ class ShareSubscriptionSynchronization(msrest.serialization.Model):
:vartype status: str
:param synchronization_id: Required. Synchronization id.
:type synchronization_id: str
- :ivar synchronization_mode: Synchronization Mode. Possible values include: 'Incremental',
- 'FullSync'.
+ :ivar synchronization_mode: Synchronization Mode. Possible values include: "Incremental",
+ "FullSync".
:vartype synchronization_mode: str or ~data_share_management_client.models.SynchronizationMode
"""
@@ -3095,7 +3234,7 @@ def __init__(
self.message = None
self.start_time = None
self.status = None
- self.synchronization_id = kwargs.get('synchronization_id', None)
+ self.synchronization_id = kwargs['synchronization_id']
self.synchronization_mode = None
@@ -3125,7 +3264,7 @@ def __init__(
):
super(ShareSubscriptionSynchronizationList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class ShareSynchronization(msrest.serialization.Model):
@@ -3151,8 +3290,8 @@ class ShareSynchronization(msrest.serialization.Model):
:type status: str
:param synchronization_id: Synchronization id.
:type synchronization_id: str
- :ivar synchronization_mode: Synchronization mode. Possible values include: 'Incremental',
- 'FullSync'.
+ :ivar synchronization_mode: Synchronization mode. Possible values include: "Incremental",
+ "FullSync".
:vartype synchronization_mode: str or ~data_share_management_client.models.SynchronizationMode
"""
@@ -3216,7 +3355,7 @@ def __init__(
):
super(ShareSynchronizationList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class SourceShareSynchronizationSettingList(msrest.serialization.Model):
@@ -3245,10 +3384,10 @@ def __init__(
):
super(SourceShareSynchronizationSettingList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
-class SqlDBTableDataSet(DataSet):
+class SqlDbTableDataSet(DataSet):
"""A SQL DB table data set.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -3259,13 +3398,15 @@ class SqlDBTableDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param database_name: Database name of the source data set.
:type database_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -3281,6 +3422,7 @@ class SqlDBTableDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -3289,6 +3431,7 @@ class SqlDBTableDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'database_name': {'key': 'properties.databaseName', 'type': 'str'},
@@ -3302,8 +3445,8 @@ def __init__(
self,
**kwargs
):
- super(SqlDBTableDataSet, self).__init__(**kwargs)
- self.kind = 'SqlDBTable'
+ super(SqlDbTableDataSet, self).__init__(**kwargs)
+ self.kind = 'SqlDBTable' # type: str
self.database_name = kwargs.get('database_name', None)
self.data_set_id = None
self.schema_name = kwargs.get('schema_name', None)
@@ -3311,7 +3454,7 @@ def __init__(
self.table_name = kwargs.get('table_name', None)
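# Illustrative sketch: the Python class is renamed SqlDBTableDataSet ->
# SqlDbTableDataSet, but the polymorphic discriminator stays 'SqlDBTable', so
# serialized payloads are unaffected by the rename.
from azext_datashare.vendored_sdks.datashare.models._models import SqlDbTableDataSet

data_set = SqlDbTableDataSet(database_name='db1', schema_name='dbo', table_name='t1')
print(data_set.kind)  # 'SqlDBTable'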
-class SqlDBTableDataSetMapping(DataSetMapping):
+class SqlDbTableDataSetMapping(DataSetMapping):
"""A SQL DB Table data set mapping.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -3322,23 +3465,25 @@ class SqlDBTableDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param database_name: Required. DatabaseName name of the sink data set.
:type database_name: str
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param schema_name: Required. Schema of the table. Default value is dbo.
:type schema_name: str
@@ -3351,6 +3496,7 @@ class SqlDBTableDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'database_name': {'required': True},
@@ -3365,6 +3511,7 @@ class SqlDBTableDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'database_name': {'key': 'properties.databaseName', 'type': 'str'},
@@ -3380,15 +3527,15 @@ def __init__(
self,
**kwargs
):
- super(SqlDBTableDataSetMapping, self).__init__(**kwargs)
- self.kind = 'SqlDBTable'
- self.database_name = kwargs.get('database_name', None)
- self.data_set_id = kwargs.get('data_set_id', None)
+ super(SqlDbTableDataSetMapping, self).__init__(**kwargs)
+ self.kind = 'SqlDBTable' # type: str
+ self.database_name = kwargs['database_name']
+ self.data_set_id = kwargs['data_set_id']
self.data_set_mapping_status = None
self.provisioning_state = None
- self.schema_name = kwargs.get('schema_name', None)
- self.sql_server_resource_id = kwargs.get('sql_server_resource_id', None)
- self.table_name = kwargs.get('table_name', None)
+ self.schema_name = kwargs['schema_name']
+ self.sql_server_resource_id = kwargs['sql_server_resource_id']
+ self.table_name = kwargs['table_name']
class SqlDwTableDataSet(DataSet):
@@ -3402,13 +3549,15 @@ class SqlDwTableDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param data_warehouse_name: DataWarehouse name of the source data set.
@@ -3424,6 +3573,7 @@ class SqlDwTableDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -3432,6 +3582,7 @@ class SqlDwTableDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -3446,7 +3597,7 @@ def __init__(
**kwargs
):
super(SqlDwTableDataSet, self).__init__(**kwargs)
- self.kind = 'SqlDWTable'
+ self.kind = 'SqlDWTable' # type: str
self.data_set_id = None
self.data_warehouse_name = kwargs.get('data_warehouse_name', None)
self.schema_name = kwargs.get('schema_name', None)
@@ -3465,23 +3616,25 @@ class SqlDwTableDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param data_warehouse_name: Required. DataWarehouse name of the source data set.
:type data_warehouse_name: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param schema_name: Required. Schema of the table. Default value is dbo.
:type schema_name: str
@@ -3494,6 +3647,7 @@ class SqlDwTableDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -3508,6 +3662,7 @@ class SqlDwTableDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -3524,14 +3679,141 @@ def __init__(
**kwargs
):
super(SqlDwTableDataSetMapping, self).__init__(**kwargs)
- self.kind = 'SqlDWTable'
- self.data_set_id = kwargs.get('data_set_id', None)
+ self.kind = 'SqlDWTable' # type: str
+ self.data_set_id = kwargs['data_set_id']
self.data_set_mapping_status = None
- self.data_warehouse_name = kwargs.get('data_warehouse_name', None)
+ self.data_warehouse_name = kwargs['data_warehouse_name']
self.provisioning_state = None
- self.schema_name = kwargs.get('schema_name', None)
- self.sql_server_resource_id = kwargs.get('sql_server_resource_id', None)
- self.table_name = kwargs.get('table_name', None)
+ self.schema_name = kwargs['schema_name']
+ self.sql_server_resource_id = kwargs['sql_server_resource_id']
+ self.table_name = kwargs['table_name']
+
+
+class SynapseWorkspaceSqlPoolTableDataSet(DataSet):
+ """A Synapse Workspace Sql Pool Table data set.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: The resource id of the azure resource.
+ :vartype id: str
+ :ivar name: Name of the azure resource.
+ :vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
+ :ivar type: Type of the azure resource.
+ :vartype type: str
+ :param kind: Required. Kind of data set. Constant filled by server. Possible values include:
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
+ :ivar data_set_id: Unique id for identifying a data set resource.
+ :vartype data_set_id: str
+ :param synapse_workspace_sql_pool_table_resource_id: Required. Resource id of the Synapse
+ Workspace SQL Pool Table.
+ :type synapse_workspace_sql_pool_table_resource_id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'type': {'readonly': True},
+ 'kind': {'required': True},
+ 'data_set_id': {'readonly': True},
+ 'synapse_workspace_sql_pool_table_resource_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'kind': {'key': 'kind', 'type': 'str'},
+ 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
+ 'synapse_workspace_sql_pool_table_resource_id': {'key': 'properties.synapseWorkspaceSqlPoolTableResourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SynapseWorkspaceSqlPoolTableDataSet, self).__init__(**kwargs)
+ self.kind = 'SynapseWorkspaceSqlPoolTable' # type: str
+ self.data_set_id = None
+ self.synapse_workspace_sql_pool_table_resource_id = kwargs['synapse_workspace_sql_pool_table_resource_id']
+
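# Illustrative sketch: the new SynapseWorkspaceSqlPoolTableDataSet only requires
# the Synapse SQL pool table resource id; the id below is a placeholder.
from azext_datashare.vendored_sdks.datashare.models._models import (
    SynapseWorkspaceSqlPoolTableDataSet,
)

synapse_data_set = SynapseWorkspaceSqlPoolTableDataSet(
    synapse_workspace_sql_pool_table_resource_id=(
        '/subscriptions/xxxx/resourceGroups/rg/providers/Microsoft.Synapse/'
        'workspaces/ws/sqlPools/pool/schemas/dbo/tables/table1'
    ),
)
print(synapse_data_set.kind)  # 'SynapseWorkspaceSqlPoolTable'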
+
+class SynapseWorkspaceSqlPoolTableDataSetMapping(DataSetMapping):
+ """A Synapse Workspace Sql Pool Table data set mapping.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: The resource id of the azure resource.
+ :vartype id: str
+ :ivar name: Name of the azure resource.
+ :vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
+ :ivar type: Type of the azure resource.
+ :vartype type: str
+ :param kind: Required. Kind of data set mapping. Constant filled by server. Possible values
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
+ :param data_set_id: Required. The id of the source data set.
+ :type data_set_id: str
+ :ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
+ include: "Ok", "Broken".
+ :vartype data_set_mapping_status: str or
+ ~data_share_management_client.models.DataSetMappingStatus
+ :ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
+ :vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
+ :param synapse_workspace_sql_pool_table_resource_id: Required. Resource id of the Synapse
+ Workspace SQL Pool Table.
+ :type synapse_workspace_sql_pool_table_resource_id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'type': {'readonly': True},
+ 'kind': {'required': True},
+ 'data_set_id': {'required': True},
+ 'data_set_mapping_status': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'synapse_workspace_sql_pool_table_resource_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'kind': {'key': 'kind', 'type': 'str'},
+ 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
+ 'data_set_mapping_status': {'key': 'properties.dataSetMappingStatus', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'synapse_workspace_sql_pool_table_resource_id': {'key': 'properties.synapseWorkspaceSqlPoolTableResourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SynapseWorkspaceSqlPoolTableDataSetMapping, self).__init__(**kwargs)
+ self.kind = 'SynapseWorkspaceSqlPoolTable' # type: str
+ self.data_set_id = kwargs['data_set_id']
+ self.data_set_mapping_status = None
+ self.provisioning_state = None
+ self.synapse_workspace_sql_pool_table_resource_id = kwargs['synapse_workspace_sql_pool_table_resource_id']
class SynchronizationDetails(msrest.serialization.Model):
@@ -3541,9 +3823,10 @@ class SynchronizationDetails(msrest.serialization.Model):
:ivar data_set_id: Id of data set.
:vartype data_set_id: str
- :ivar data_set_type: Type of the data set. Possible values include: 'Blob', 'Container',
- 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File', 'AdlsGen1Folder',
- 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable'.
+ :ivar data_set_type: Type of the data set. Possible values include: "Blob", "Container",
+ "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder",
+ "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
:vartype data_set_type: str or ~data_share_management_client.models.DataSetType
:ivar duration_ms: Duration of data set level copy.
:vartype duration_ms: int
@@ -3657,7 +3940,7 @@ def __init__(
):
super(SynchronizationDetailsList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class SynchronizationSettingList(msrest.serialization.Model):
@@ -3686,14 +3969,14 @@ def __init__(
):
super(SynchronizationSettingList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
class Synchronize(msrest.serialization.Model):
"""Payload for the synchronizing the data.
:param synchronization_mode: Mode of synchronization used in triggers and snapshot sync.
- Incremental by default. Possible values include: 'Incremental', 'FullSync'.
+ Incremental by default. Possible values include: "Incremental", "FullSync".
:type synchronization_mode: str or ~data_share_management_client.models.SynchronizationMode
"""
@@ -3709,6 +3992,47 @@ def __init__(
self.synchronization_mode = kwargs.get('synchronization_mode', None)
+class SystemData(msrest.serialization.Model):
+ """Metadata pertaining to creation and last modification of the resource.
+
+ :param created_at: The timestamp of resource creation (UTC).
+ :type created_at: ~datetime.datetime
+ :param created_by: The identity that created the resource.
+ :type created_by: str
+ :param created_by_type: The type of identity that created the resource. Possible values
+ include: "User", "Application", "ManagedIdentity", "Key".
+ :type created_by_type: str or ~data_share_management_client.models.CreatedByType
+ :param last_modified_at: The timestamp of resource last modification (UTC).
+ :type last_modified_at: ~datetime.datetime
+ :param last_modified_by: The identity that last modified the resource.
+ :type last_modified_by: str
+ :param last_modified_by_type: The type of identity that last modified the resource. Possible
+ values include: "User", "Application", "ManagedIdentity", "Key".
+ :type last_modified_by_type: str or ~data_share_management_client.models.LastModifiedByType
+ """
+
+ _attribute_map = {
+ 'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
+ 'created_by': {'key': 'createdBy', 'type': 'str'},
+ 'created_by_type': {'key': 'createdByType', 'type': 'str'},
+ 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
+ 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
+ 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemData, self).__init__(**kwargs)
+ self.created_at = kwargs.get('created_at', None)
+ self.created_by = kwargs.get('created_by', None)
+ self.created_by_type = kwargs.get('created_by_type', None)
+ self.last_modified_at = kwargs.get('last_modified_at', None)
+ self.last_modified_by = kwargs.get('last_modified_by', None)
+ self.last_modified_by_type = kwargs.get('last_modified_by_type', None)
+
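# Illustrative sketch: SystemData is read-only on the resources above (populated
# by the service), but the model can be built directly, e.g. in tests.
from datetime import datetime
from azext_datashare.vendored_sdks.datashare.models._models import SystemData

system_data = SystemData(
    created_at=datetime(2021, 1, 1),
    created_by='user@example.com',
    created_by_type='User',
    last_modified_at=datetime(2021, 2, 1),
    last_modified_by='user@example.com',
    last_modified_by_type='User',
)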
+
class TriggerList(msrest.serialization.Model):
"""List response for get triggers.
@@ -3735,4 +4059,4 @@ def __init__(
):
super(TriggerList, self).__init__(**kwargs)
self.next_link = kwargs.get('next_link', None)
- self.value = kwargs.get('value', None)
+ self.value = kwargs['value']
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/models/_models_py3.py b/src/datashare/azext_datashare/vendored_sdks/datashare/models/_models_py3.py
index bc585b353f5..79fa6892fc9 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/models/_models_py3.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/models/_models_py3.py
@@ -12,36 +12,82 @@
from azure.core.exceptions import HttpResponseError
import msrest.serialization
+from ._data_share_management_client_enums import *
-class DefaultDto(msrest.serialization.Model):
+
+class ProxyDto(msrest.serialization.Model):
+ """Base data transfer object implementation for proxy resources.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource id of the azure resource.
+ :vartype id: str
+ :ivar name: Name of the azure resource.
+ :vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
+ :ivar type: Type of the azure resource.
+ :vartype type: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ProxyDto, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.system_data = None
+ self.type = None
+
+
+class DefaultDto(ProxyDto):
"""Base data transfer object implementation for default resources.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource id of the azure resource.
:vartype id: str
- :param location: Location of the azure resource.
- :type location: str
:ivar name: Name of the azure resource.
:vartype name: str
- :param tags: A set of tags. Tags on the azure resource.
- :type tags: dict[str, str]
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
+ :param location: Location of the azure resource.
+ :type location: str
+ :param tags: A set of tags. Tags on the azure resource.
+ :type tags: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
- 'location': {'key': 'location', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
- 'tags': {'key': 'tags', 'type': '{str}'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
@@ -52,11 +98,8 @@ def __init__(
**kwargs
):
super(DefaultDto, self).__init__(**kwargs)
- self.id = None
self.location = location
- self.name = None
self.tags = tags
- self.type = None
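# Illustrative sketch: the typed _models_py3.py variants take explicit keyword
# arguments instead of bare **kwargs; the exact signature is assumed from the
# assignments shown above (location and tags are optional).
from azext_datashare.vendored_sdks.datashare.models._models_py3 import DefaultDto

dto = DefaultDto(location='eastus', tags={'env': 'test'})
print(dto.id, dto.system_data)  # server-populated fields start out as None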
class Account(DefaultDto):
@@ -68,20 +111,22 @@ class Account(DefaultDto):
:ivar id: The resource id of the azure resource.
:vartype id: str
- :param location: Location of the azure resource.
- :type location: str
:ivar name: Name of the azure resource.
:vartype name: str
- :param tags: A set of tags. Tags on the azure resource.
- :type tags: dict[str, str]
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
+ :param location: Location of the azure resource.
+ :type location: str
+ :param tags: A set of tags. Tags on the azure resource.
+ :type tags: dict[str, str]
:param identity: Required. Identity Info on the Account.
:type identity: ~data_share_management_client.models.Identity
:ivar created_at: Time at which the account was created.
:vartype created_at: ~datetime.datetime
:ivar provisioning_state: Provisioning state of the Account. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:ivar user_email: Email of the user who created the resource.
:vartype user_email: str
@@ -92,6 +137,7 @@ class Account(DefaultDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'identity': {'required': True},
'created_at': {'readonly': True},
@@ -102,10 +148,11 @@ class Account(DefaultDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
- 'location': {'key': 'location', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
- 'tags': {'key': 'tags', 'type': '{str}'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
'identity': {'key': 'identity', 'type': 'Identity'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
@@ -182,46 +229,11 @@ def __init__(
self.tags = tags
-class ProxyDto(msrest.serialization.Model):
- """Base data transfer object implementation for proxy resources.
-
- Variables are only populated by the server, and will be ignored when sending a request.
-
- :ivar id: The resource id of the azure resource.
- :vartype id: str
- :ivar name: Name of the azure resource.
- :vartype name: str
- :ivar type: Type of the azure resource.
- :vartype type: str
- """
-
- _validation = {
- 'id': {'readonly': True},
- 'name': {'readonly': True},
- 'type': {'readonly': True},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'str'},
- 'name': {'key': 'name', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(ProxyDto, self).__init__(**kwargs)
- self.id = None
- self.name = None
- self.type = None
-
-
class DataSet(ProxyDto):
"""A DataSet data transfer object.
You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: AdlsGen1FileDataSet, AdlsGen1FolderDataSet, AdlsGen2FileDataSet, AdlsGen2FileSystemDataSet, AdlsGen2FolderDataSet, BlobDataSet, BlobFolderDataSet, BlobContainerDataSet, KustoClusterDataSet, KustoDatabaseDataSet, SqlDBTableDataSet, SqlDwTableDataSet.
+ sub-classes are: AdlsGen1FileDataSet, AdlsGen1FolderDataSet, AdlsGen2FileDataSet, AdlsGen2FileSystemDataSet, AdlsGen2FolderDataSet, BlobDataSet, BlobFolderDataSet, BlobContainerDataSet, KustoClusterDataSet, KustoDatabaseDataSet, SqlDbTableDataSet, SqlDwTableDataSet, SynapseWorkspaceSqlPoolTableDataSet.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -231,18 +243,21 @@ class DataSet(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
}
@@ -250,12 +265,13 @@ class DataSet(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
}
_subtype_map = {
- 'kind': {'AdlsGen1File': 'AdlsGen1FileDataSet', 'AdlsGen1Folder': 'AdlsGen1FolderDataSet', 'AdlsGen2File': 'AdlsGen2FileDataSet', 'AdlsGen2FileSystem': 'AdlsGen2FileSystemDataSet', 'AdlsGen2Folder': 'AdlsGen2FolderDataSet', 'Blob': 'BlobDataSet', 'BlobFolder': 'BlobFolderDataSet', 'Container': 'BlobContainerDataSet', 'KustoCluster': 'KustoClusterDataSet', 'KustoDatabase': 'KustoDatabaseDataSet', 'SqlDBTable': 'SqlDBTableDataSet', 'SqlDWTable': 'SqlDwTableDataSet'}
+ 'kind': {'AdlsGen1File': 'AdlsGen1FileDataSet', 'AdlsGen1Folder': 'AdlsGen1FolderDataSet', 'AdlsGen2File': 'AdlsGen2FileDataSet', 'AdlsGen2FileSystem': 'AdlsGen2FileSystemDataSet', 'AdlsGen2Folder': 'AdlsGen2FolderDataSet', 'Blob': 'BlobDataSet', 'BlobFolder': 'BlobFolderDataSet', 'Container': 'BlobContainerDataSet', 'KustoCluster': 'KustoClusterDataSet', 'KustoDatabase': 'KustoDatabaseDataSet', 'SqlDBTable': 'SqlDbTableDataSet', 'SqlDWTable': 'SqlDwTableDataSet', 'SynapseWorkspaceSqlPoolTable': 'SynapseWorkspaceSqlPoolTableDataSet'}
}
def __init__(
@@ -263,7 +279,7 @@ def __init__(
**kwargs
):
super(DataSet, self).__init__(**kwargs)
- self.kind = 'DataSet'
+ self.kind = 'DataSet' # type: str
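# Illustrative sketch: the updated _subtype_map lets the base DataSet resolve
# kind == 'SynapseWorkspaceSqlPoolTable' to the new subclass during
# deserialization (behavior assumed from msrest's polymorphic handling).
from azext_datashare.vendored_sdks.datashare.models._models_py3 import DataSet

payload = {
    'kind': 'SynapseWorkspaceSqlPoolTable',
    'properties': {
        'synapseWorkspaceSqlPoolTableResourceId': (
            '/subscriptions/xxxx/resourceGroups/rg/providers/Microsoft.Synapse/'
            'workspaces/ws/sqlPools/pool/schemas/dbo/tables/table1'
        ),
    },
}
data_set = DataSet.deserialize(payload)
print(type(data_set).__name__)  # expected: SynapseWorkspaceSqlPoolTableDataSet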
class AdlsGen1FileDataSet(DataSet):
@@ -277,13 +293,15 @@ class AdlsGen1FileDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param account_name: Required. The ADLS account name.
:type account_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -301,6 +319,7 @@ class AdlsGen1FileDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'account_name': {'required': True},
@@ -314,6 +333,7 @@ class AdlsGen1FileDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'account_name': {'key': 'properties.accountName', 'type': 'str'},
@@ -335,7 +355,7 @@ def __init__(
**kwargs
):
super(AdlsGen1FileDataSet, self).__init__(**kwargs)
- self.kind = 'AdlsGen1File'
+ self.kind = 'AdlsGen1File' # type: str
self.account_name = account_name
self.data_set_id = None
self.file_name = file_name
@@ -355,13 +375,15 @@ class AdlsGen1FolderDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param account_name: Required. The ADLS account name.
:type account_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -377,6 +399,7 @@ class AdlsGen1FolderDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'account_name': {'required': True},
@@ -389,6 +412,7 @@ class AdlsGen1FolderDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'account_name': {'key': 'properties.accountName', 'type': 'str'},
@@ -408,7 +432,7 @@ def __init__(
**kwargs
):
super(AdlsGen1FolderDataSet, self).__init__(**kwargs)
- self.kind = 'AdlsGen1Folder'
+ self.kind = 'AdlsGen1Folder' # type: str
self.account_name = account_name
self.data_set_id = None
self.folder_path = folder_path
@@ -427,13 +451,15 @@ class AdlsGen2FileDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param file_path: Required. File path within the file system.
@@ -451,6 +477,7 @@ class AdlsGen2FileDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -464,6 +491,7 @@ class AdlsGen2FileDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -485,7 +513,7 @@ def __init__(
**kwargs
):
super(AdlsGen2FileDataSet, self).__init__(**kwargs)
- self.kind = 'AdlsGen2File'
+ self.kind = 'AdlsGen2File' # type: str
self.data_set_id = None
self.file_path = file_path
self.file_system = file_system
@@ -498,7 +526,7 @@ class DataSetMapping(ProxyDto):
"""A data set mapping data transfer object.
You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: AdlsGen2FileDataSetMapping, AdlsGen2FileSystemDataSetMapping, AdlsGen2FolderDataSetMapping, BlobDataSetMapping, BlobFolderDataSetMapping, BlobContainerDataSetMapping, KustoClusterDataSetMapping, KustoDatabaseDataSetMapping, SqlDBTableDataSetMapping, SqlDwTableDataSetMapping.
+ sub-classes are: AdlsGen2FileDataSetMapping, AdlsGen2FileSystemDataSetMapping, AdlsGen2FolderDataSetMapping, BlobDataSetMapping, BlobFolderDataSetMapping, BlobContainerDataSetMapping, KustoClusterDataSetMapping, KustoDatabaseDataSetMapping, SqlDbTableDataSetMapping, SqlDwTableDataSetMapping, SynapseWorkspaceSqlPoolTableDataSetMapping.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -508,18 +536,21 @@ class DataSetMapping(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
}
@@ -527,12 +558,13 @@ class DataSetMapping(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
}
_subtype_map = {
- 'kind': {'AdlsGen2File': 'AdlsGen2FileDataSetMapping', 'AdlsGen2FileSystem': 'AdlsGen2FileSystemDataSetMapping', 'AdlsGen2Folder': 'AdlsGen2FolderDataSetMapping', 'Blob': 'BlobDataSetMapping', 'BlobFolder': 'BlobFolderDataSetMapping', 'Container': 'BlobContainerDataSetMapping', 'KustoCluster': 'KustoClusterDataSetMapping', 'KustoDatabase': 'KustoDatabaseDataSetMapping', 'SqlDBTable': 'SqlDBTableDataSetMapping', 'SqlDWTable': 'SqlDwTableDataSetMapping'}
+ 'kind': {'AdlsGen2File': 'AdlsGen2FileDataSetMapping', 'AdlsGen2FileSystem': 'AdlsGen2FileSystemDataSetMapping', 'AdlsGen2Folder': 'AdlsGen2FolderDataSetMapping', 'Blob': 'BlobDataSetMapping', 'BlobFolder': 'BlobFolderDataSetMapping', 'Container': 'BlobContainerDataSetMapping', 'KustoCluster': 'KustoClusterDataSetMapping', 'KustoDatabase': 'KustoDatabaseDataSetMapping', 'SqlDBTable': 'SqlDbTableDataSetMapping', 'SqlDWTable': 'SqlDwTableDataSetMapping', 'SynapseWorkspaceSqlPoolTable': 'SynapseWorkspaceSqlPoolTableDataSetMapping'}
}
def __init__(
@@ -540,7 +572,7 @@ def __init__(
**kwargs
):
super(DataSetMapping, self).__init__(**kwargs)
- self.kind = 'DataSetMapping'
+ self.kind = 'DataSetMapping' # type: str
class AdlsGen2FileDataSetMapping(DataSetMapping):
@@ -554,27 +586,29 @@ class AdlsGen2FileDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param file_path: Required. File path within the file system.
:type file_path: str
:param file_system: Required. File system to which the file belongs.
:type file_system: str
- :param output_type: Type of output file. Possible values include: 'Csv', 'Parquet'.
+ :param output_type: Type of output file. Possible values include: "Csv", "Parquet".
:type output_type: str or ~data_share_management_client.models.OutputType
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -587,6 +621,7 @@ class AdlsGen2FileDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -602,6 +637,7 @@ class AdlsGen2FileDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -628,7 +664,7 @@ def __init__(
**kwargs
):
super(AdlsGen2FileDataSetMapping, self).__init__(**kwargs)
- self.kind = 'AdlsGen2File'
+ self.kind = 'AdlsGen2File' # type: str
self.data_set_id = data_set_id
self.data_set_mapping_status = None
self.file_path = file_path
@@ -651,13 +687,15 @@ class AdlsGen2FileSystemDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param file_system: Required. The file system name.
@@ -673,6 +711,7 @@ class AdlsGen2FileSystemDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -685,6 +724,7 @@ class AdlsGen2FileSystemDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -704,7 +744,7 @@ def __init__(
**kwargs
):
super(AdlsGen2FileSystemDataSet, self).__init__(**kwargs)
- self.kind = 'AdlsGen2FileSystem'
+ self.kind = 'AdlsGen2FileSystem' # type: str
self.data_set_id = None
self.file_system = file_system
self.resource_group = resource_group
@@ -723,23 +763,25 @@ class AdlsGen2FileSystemDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param file_system: Required. The file system name.
:type file_system: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -752,6 +794,7 @@ class AdlsGen2FileSystemDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -766,6 +809,7 @@ class AdlsGen2FileSystemDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -788,7 +832,7 @@ def __init__(
**kwargs
):
super(AdlsGen2FileSystemDataSetMapping, self).__init__(**kwargs)
- self.kind = 'AdlsGen2FileSystem'
+ self.kind = 'AdlsGen2FileSystem' # type: str
self.data_set_id = data_set_id
self.data_set_mapping_status = None
self.file_system = file_system
@@ -809,13 +853,15 @@ class AdlsGen2FolderDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param file_system: Required. File system to which the folder belongs.
@@ -833,6 +879,7 @@ class AdlsGen2FolderDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -846,6 +893,7 @@ class AdlsGen2FolderDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -867,7 +915,7 @@ def __init__(
**kwargs
):
super(AdlsGen2FolderDataSet, self).__init__(**kwargs)
- self.kind = 'AdlsGen2Folder'
+ self.kind = 'AdlsGen2Folder' # type: str
self.data_set_id = None
self.file_system = file_system
self.folder_path = folder_path
@@ -887,17 +935,19 @@ class AdlsGen2FolderDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param file_system: Required. File system to which the folder belongs.
@@ -905,7 +955,7 @@ class AdlsGen2FolderDataSetMapping(DataSetMapping):
:param folder_path: Required. Folder path within the file system.
:type folder_path: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -918,6 +968,7 @@ class AdlsGen2FolderDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -933,6 +984,7 @@ class AdlsGen2FolderDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -957,7 +1009,7 @@ def __init__(
**kwargs
):
super(AdlsGen2FolderDataSetMapping, self).__init__(**kwargs)
- self.kind = 'AdlsGen2Folder'
+ self.kind = 'AdlsGen2Folder' # type: str
self.data_set_id = data_set_id
self.data_set_mapping_status = None
self.file_system = file_system
@@ -979,13 +1031,15 @@ class BlobContainerDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param container_name: Required. BLOB Container name.
:type container_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -1001,6 +1055,7 @@ class BlobContainerDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -1013,6 +1068,7 @@ class BlobContainerDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -1032,7 +1088,7 @@ def __init__(
**kwargs
):
super(BlobContainerDataSet, self).__init__(**kwargs)
- self.kind = 'Container'
+ self.kind = 'Container' # type: str
self.container_name = container_name
self.data_set_id = None
self.resource_group = resource_group
@@ -1051,23 +1107,25 @@ class BlobContainerDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param container_name: Required. BLOB Container name.
:type container_name: str
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -1080,6 +1138,7 @@ class BlobContainerDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -1094,6 +1153,7 @@ class BlobContainerDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -1116,7 +1176,7 @@ def __init__(
**kwargs
):
super(BlobContainerDataSetMapping, self).__init__(**kwargs)
- self.kind = 'Container'
+ self.kind = 'Container' # type: str
self.container_name = container_name
self.data_set_id = data_set_id
self.data_set_mapping_status = None
@@ -1137,13 +1197,15 @@ class BlobDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param container_name: Required. Container that has the file path.
:type container_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -1161,6 +1223,7 @@ class BlobDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -1174,6 +1237,7 @@ class BlobDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -1195,7 +1259,7 @@ def __init__(
**kwargs
):
super(BlobDataSet, self).__init__(**kwargs)
- self.kind = 'Blob'
+ self.kind = 'Blob' # type: str
self.container_name = container_name
self.data_set_id = None
self.file_path = file_path
@@ -1215,27 +1279,29 @@ class BlobDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param container_name: Required. Container that has the file path.
:type container_name: str
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param file_path: Required. File path within the source data set.
:type file_path: str
- :param output_type: File output type. Possible values include: 'Csv', 'Parquet'.
+ :param output_type: File output type. Possible values include: "Csv", "Parquet".
:type output_type: str or ~data_share_management_client.models.OutputType
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -1248,6 +1314,7 @@ class BlobDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -1263,6 +1330,7 @@ class BlobDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -1289,7 +1357,7 @@ def __init__(
**kwargs
):
super(BlobDataSetMapping, self).__init__(**kwargs)
- self.kind = 'Blob'
+ self.kind = 'Blob' # type: str
self.container_name = container_name
self.data_set_id = data_set_id
self.data_set_mapping_status = None
@@ -1312,13 +1380,15 @@ class BlobFolderDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param container_name: Required. Container that has the file path.
:type container_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -1336,6 +1406,7 @@ class BlobFolderDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -1349,6 +1420,7 @@ class BlobFolderDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -1370,7 +1442,7 @@ def __init__(
**kwargs
):
super(BlobFolderDataSet, self).__init__(**kwargs)
- self.kind = 'BlobFolder'
+ self.kind = 'BlobFolder' # type: str
self.container_name = container_name
self.data_set_id = None
self.prefix = prefix
@@ -1390,25 +1462,27 @@ class BlobFolderDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param container_name: Required. Container that has the file path.
:type container_name: str
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param prefix: Required. Prefix for blob folder.
:type prefix: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param resource_group: Required. Resource group of storage account.
:type resource_group: str
@@ -1421,6 +1495,7 @@ class BlobFolderDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'container_name': {'required': True},
@@ -1436,6 +1511,7 @@ class BlobFolderDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'container_name': {'key': 'properties.containerName', 'type': 'str'},
@@ -1460,7 +1536,7 @@ def __init__(
**kwargs
):
super(BlobFolderDataSetMapping, self).__init__(**kwargs)
- self.kind = 'BlobFolder'
+ self.kind = 'BlobFolder' # type: str
self.container_name = container_name
self.data_set_id = data_set_id
self.data_set_mapping_status = None
@@ -1482,16 +1558,21 @@ class ConsumerInvitation(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:ivar data_set_count: Number of data sets in a share.
:vartype data_set_count: int
:ivar description: Description shared when the invitation was created.
:vartype description: str
+ :ivar expiration_date: The expiration date for the share subscription created by accepting the
+ invitation.
+ :vartype expiration_date: ~datetime.datetime
:param invitation_id: Required. Unique id of the invitation.
:type invitation_id: str
- :ivar invitation_status: The status of the invitation. Possible values include: 'Pending',
- 'Accepted', 'Rejected', 'Withdrawn'.
+ :ivar invitation_status: The status of the invitation. Possible values include: "Pending",
+ "Accepted", "Rejected", "Withdrawn".
:vartype invitation_status: str or ~data_share_management_client.models.InvitationStatus
:ivar location: invitation location.
:vartype location: str
@@ -1518,9 +1599,11 @@ class ConsumerInvitation(ProxyDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'data_set_count': {'readonly': True},
'description': {'readonly': True},
+ 'expiration_date': {'readonly': True},
'invitation_id': {'required': True},
'invitation_status': {'readonly': True},
'location': {'readonly': True},
@@ -1538,9 +1621,11 @@ class ConsumerInvitation(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'data_set_count': {'key': 'properties.dataSetCount', 'type': 'int'},
'description': {'key': 'properties.description', 'type': 'str'},
+ 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'},
'invitation_id': {'key': 'properties.invitationId', 'type': 'str'},
'invitation_status': {'key': 'properties.invitationStatus', 'type': 'str'},
'location': {'key': 'properties.location', 'type': 'str'},
@@ -1564,6 +1649,7 @@ def __init__(
super(ConsumerInvitation, self).__init__(**kwargs)
self.data_set_count = None
self.description = None
+ self.expiration_date = None
self.invitation_id = invitation_id
self.invitation_status = None
self.location = None
@@ -1619,6 +1705,8 @@ class ConsumerSourceDataSet(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:ivar data_set_id: DataSet Id.
@@ -1629,15 +1717,17 @@ class ConsumerSourceDataSet(ProxyDto):
:vartype data_set_name: str
:ivar data_set_path: DataSet path.
:vartype data_set_path: str
- :ivar data_set_type: Type of data set. Possible values include: 'Blob', 'Container',
- 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File', 'AdlsGen1Folder',
- 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable'.
+ :ivar data_set_type: Type of data set. Possible values include: "Blob", "Container",
+ "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder",
+ "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
:vartype data_set_type: str or ~data_share_management_client.models.DataSetType
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'data_set_id': {'readonly': True},
'data_set_location': {'readonly': True},
@@ -1649,6 +1739,7 @@ class ConsumerSourceDataSet(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
'data_set_location': {'key': 'properties.dataSetLocation', 'type': 'str'},
@@ -1870,14 +1961,13 @@ class Identity(msrest.serialization.Model):
:vartype principal_id: str
:ivar tenant_id: Tenant Id.
:vartype tenant_id: str
- :ivar type: Identity Type. Default value: "SystemAssigned".
- :vartype type: str
+ :param type: Identity Type. Possible values include: "SystemAssigned".
+ :type type: str or ~data_share_management_client.models.Type
"""
_validation = {
'principal_id': {'readonly': True},
'tenant_id': {'readonly': True},
- 'type': {'constant': True},
}
_attribute_map = {
@@ -1886,15 +1976,16 @@ class Identity(msrest.serialization.Model):
'type': {'key': 'type', 'type': 'str'},
}
- type = "SystemAssigned"
-
def __init__(
self,
+ *,
+ type: Optional[Union[str, "Type"]] = None,
**kwargs
):
super(Identity, self).__init__(**kwargs)
self.principal_id = None
self.tenant_id = None
+ self.type = type
class Invitation(ProxyDto):
@@ -1906,12 +1997,16 @@ class Invitation(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
+ :param expiration_date: The expiration date for the invitation and share subscription.
+ :type expiration_date: ~datetime.datetime
:ivar invitation_id: unique invitation id.
:vartype invitation_id: str
- :ivar invitation_status: The status of the invitation. Possible values include: 'Pending',
- 'Accepted', 'Rejected', 'Withdrawn'.
+ :ivar invitation_status: The status of the invitation. Possible values include: "Pending",
+ "Accepted", "Rejected", "Withdrawn".
:vartype invitation_status: str or ~data_share_management_client.models.InvitationStatus
:ivar responded_at: The time the recipient responded to the invitation.
:vartype responded_at: ~datetime.datetime
@@ -1934,6 +2029,7 @@ class Invitation(ProxyDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'invitation_id': {'readonly': True},
'invitation_status': {'readonly': True},
@@ -1946,7 +2042,9 @@ class Invitation(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
+ 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'},
'invitation_id': {'key': 'properties.invitationId', 'type': 'str'},
'invitation_status': {'key': 'properties.invitationStatus', 'type': 'str'},
'responded_at': {'key': 'properties.respondedAt', 'type': 'iso-8601'},
@@ -1961,12 +2059,14 @@ class Invitation(ProxyDto):
def __init__(
self,
*,
+ expiration_date: Optional[datetime.datetime] = None,
target_active_directory_id: Optional[str] = None,
target_email: Optional[str] = None,
target_object_id: Optional[str] = None,
**kwargs
):
super(Invitation, self).__init__(**kwargs)
+ self.expiration_date = expiration_date
self.invitation_id = None
self.invitation_status = None
self.responded_at = None
@@ -2021,13 +2121,15 @@ class KustoClusterDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param kusto_cluster_resource_id: Required. Resource id of the kusto cluster.
@@ -2035,13 +2137,14 @@ class KustoClusterDataSet(DataSet):
:ivar location: Location of the kusto cluster.
:vartype location: str
:ivar provisioning_state: Provisioning state of the kusto cluster data set. Possible values
- include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ include: "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -2053,6 +2156,7 @@ class KustoClusterDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -2068,7 +2172,7 @@ def __init__(
**kwargs
):
super(KustoClusterDataSet, self).__init__(**kwargs)
- self.kind = 'KustoCluster'
+ self.kind = 'KustoCluster' # type: str
self.data_set_id = None
self.kusto_cluster_resource_id = kusto_cluster_resource_id
self.location = None
@@ -2086,17 +2190,19 @@ class KustoClusterDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param kusto_cluster_resource_id: Required. Resource id of the sink kusto cluster.
@@ -2104,13 +2210,14 @@ class KustoClusterDataSetMapping(DataSetMapping):
:ivar location: Location of the sink kusto cluster.
:vartype location: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -2123,6 +2230,7 @@ class KustoClusterDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -2140,7 +2248,7 @@ def __init__(
**kwargs
):
super(KustoClusterDataSetMapping, self).__init__(**kwargs)
- self.kind = 'KustoCluster'
+ self.kind = 'KustoCluster' # type: str
self.data_set_id = data_set_id
self.data_set_mapping_status = None
self.kusto_cluster_resource_id = kusto_cluster_resource_id
@@ -2159,13 +2267,15 @@ class KustoDatabaseDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param kusto_database_resource_id: Required. Resource id of the kusto database.
@@ -2173,13 +2283,14 @@ class KustoDatabaseDataSet(DataSet):
:ivar location: Location of the kusto cluster.
:vartype location: str
:ivar provisioning_state: Provisioning state of the kusto database data set. Possible values
- include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ include: "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -2191,6 +2302,7 @@ class KustoDatabaseDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -2206,7 +2318,7 @@ def __init__(
**kwargs
):
super(KustoDatabaseDataSet, self).__init__(**kwargs)
- self.kind = 'KustoDatabase'
+ self.kind = 'KustoDatabase' # type: str
self.data_set_id = None
self.kusto_database_resource_id = kusto_database_resource_id
self.location = None
@@ -2224,17 +2336,19 @@ class KustoDatabaseDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param kusto_cluster_resource_id: Required. Resource id of the sink kusto cluster.
@@ -2242,13 +2356,14 @@ class KustoDatabaseDataSetMapping(DataSetMapping):
:ivar location: Location of the sink kusto cluster.
:vartype location: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -2261,6 +2376,7 @@ class KustoDatabaseDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -2278,7 +2394,7 @@ def __init__(
**kwargs
):
super(KustoDatabaseDataSetMapping, self).__init__(**kwargs)
- self.kind = 'KustoDatabase'
+ self.kind = 'KustoDatabase' # type: str
self.data_set_id = data_set_id
self.data_set_mapping_status = None
self.kusto_cluster_resource_id = kusto_cluster_resource_id
@@ -2362,6 +2478,8 @@ class OperationMetaMetricSpecification(msrest.serialization.Model):
:type display_name: str
:param enable_regional_mdm_account: enable regional mdm account.
:type enable_regional_mdm_account: str
+ :param fill_gap_with_zero: fill gap with zero.
+ :type fill_gap_with_zero: bool
:param internal_metric_name: internal metric name.
:type internal_metric_name: str
:param name: name of the metric.
@@ -2383,6 +2501,7 @@ class OperationMetaMetricSpecification(msrest.serialization.Model):
'display_description': {'key': 'displayDescription', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'},
+ 'fill_gap_with_zero': {'key': 'fillGapWithZero', 'type': 'bool'},
'internal_metric_name': {'key': 'internalMetricName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'resource_id_dimension_name_override': {'key': 'resourceIdDimensionNameOverride', 'type': 'str'},
@@ -2399,6 +2518,7 @@ def __init__(
display_description: Optional[str] = None,
display_name: Optional[str] = None,
enable_regional_mdm_account: Optional[str] = None,
+ fill_gap_with_zero: Optional[bool] = None,
internal_metric_name: Optional[str] = None,
name: Optional[str] = None,
resource_id_dimension_name_override: Optional[str] = None,
@@ -2413,6 +2533,7 @@ def __init__(
self.display_description = display_description
self.display_name = display_name
self.enable_regional_mdm_account = enable_regional_mdm_account
+ self.fill_gap_with_zero = fill_gap_with_zero
self.internal_metric_name = internal_metric_name
self.name = name
self.resource_id_dimension_name_override = resource_id_dimension_name_override
@@ -2534,7 +2655,7 @@ class OperationResponse(msrest.serialization.Model):
:param start_time: start time.
:type start_time: ~datetime.datetime
:param status: Required. Operation state of the long running operation. Possible values
- include: 'Accepted', 'InProgress', 'TransientFailure', 'Succeeded', 'Failed', 'Canceled'.
+ include: "Accepted", "InProgress", "TransientFailure", "Succeeded", "Failed", "Canceled".
:type status: str or ~data_share_management_client.models.Status
"""
@@ -2574,6 +2695,8 @@ class ProviderShareSubscription(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:ivar consumer_email: Email of the consumer who created the share subscription.
@@ -2584,6 +2707,8 @@ class ProviderShareSubscription(ProxyDto):
:vartype consumer_tenant_name: str
:ivar created_at: created at.
:vartype created_at: ~datetime.datetime
+ :param expiration_date: Expiration date of the share subscription in UTC format.
+ :type expiration_date: ~datetime.datetime
:ivar provider_email: Email of the provider who created the share.
:vartype provider_email: str
:ivar provider_name: Name of the provider who created the share.
@@ -2593,7 +2718,7 @@ class ProviderShareSubscription(ProxyDto):
:ivar share_subscription_object_id: share Subscription Object Id.
:vartype share_subscription_object_id: str
:ivar share_subscription_status: Gets the status of share subscription. Possible values
- include: 'Active', 'Revoked', 'SourceDeleted', 'Revoking'.
+ include: "Active", "Revoked", "SourceDeleted", "Revoking".
:vartype share_subscription_status: str or
~data_share_management_client.models.ShareSubscriptionStatus
"""
@@ -2601,6 +2726,7 @@ class ProviderShareSubscription(ProxyDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'consumer_email': {'readonly': True},
'consumer_name': {'readonly': True},
@@ -2616,11 +2742,13 @@ class ProviderShareSubscription(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'consumer_email': {'key': 'properties.consumerEmail', 'type': 'str'},
'consumer_name': {'key': 'properties.consumerName', 'type': 'str'},
'consumer_tenant_name': {'key': 'properties.consumerTenantName', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
+ 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'},
'provider_email': {'key': 'properties.providerEmail', 'type': 'str'},
'provider_name': {'key': 'properties.providerName', 'type': 'str'},
'shared_at': {'key': 'properties.sharedAt', 'type': 'iso-8601'},
@@ -2630,6 +2758,8 @@ class ProviderShareSubscription(ProxyDto):
def __init__(
self,
+ *,
+ expiration_date: Optional[datetime.datetime] = None,
**kwargs
):
super(ProviderShareSubscription, self).__init__(**kwargs)
@@ -2637,6 +2767,7 @@ def __init__(
self.consumer_name = None
self.consumer_tenant_name = None
self.created_at = None
+ self.expiration_date = expiration_date
self.provider_email = None
self.provider_name = None
self.shared_at = None
@@ -2684,11 +2815,9 @@ class SourceShareSynchronizationSetting(msrest.serialization.Model):
All required parameters must be populated in order to send to Azure.
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ :param kind: Required. Kind of synchronization setting on share. Constant filled by server.
+ Possible values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.SourceShareSynchronizationSettingKind
"""
_validation = {
@@ -2708,7 +2837,7 @@ def __init__(
**kwargs
):
super(SourceShareSynchronizationSetting, self).__init__(**kwargs)
- self.kind = None
+ self.kind = None # type: Optional[str]
class ScheduledSourceSynchronizationSetting(SourceShareSynchronizationSetting):
@@ -2716,12 +2845,10 @@ class ScheduledSourceSynchronizationSetting(SourceShareSynchronizationSetting):
All required parameters must be populated in order to send to Azure.
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
- :param recurrence_interval: Recurrence Interval. Possible values include: 'Hour', 'Day'.
+ :param kind: Required. Kind of synchronization setting on share. Constant filled by server.
+ Possible values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.SourceShareSynchronizationSettingKind
+ :param recurrence_interval: Recurrence Interval. Possible values include: "Hour", "Day".
:type recurrence_interval: str or ~data_share_management_client.models.RecurrenceInterval
:param synchronization_time: Synchronization time.
:type synchronization_time: ~datetime.datetime
@@ -2745,7 +2872,7 @@ def __init__(
**kwargs
):
super(ScheduledSourceSynchronizationSetting, self).__init__(**kwargs)
- self.kind = 'ScheduleBased'
+ self.kind = 'ScheduleBased' # type: str
self.recurrence_interval = recurrence_interval
self.synchronization_time = synchronization_time
@@ -2764,18 +2891,19 @@ class SynchronizationSetting(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ :param kind: Required. Kind of synchronization setting.Constant filled by server. Possible
+ values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.SynchronizationSettingKind
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
}
@@ -2783,6 +2911,7 @@ class SynchronizationSetting(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
}
@@ -2796,7 +2925,7 @@ def __init__(
**kwargs
):
super(SynchronizationSetting, self).__init__(**kwargs)
- self.kind = 'SynchronizationSetting'
+ self.kind = 'SynchronizationSetting' # type: str
class ScheduledSynchronizationSetting(SynchronizationSetting):
@@ -2810,20 +2939,20 @@ class ScheduledSynchronizationSetting(SynchronizationSetting):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ :param kind: Required. Kind of synchronization setting.Constant filled by server. Possible
+ values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.SynchronizationSettingKind
:ivar created_at: Time at which the synchronization setting was created.
:vartype created_at: ~datetime.datetime
:ivar provisioning_state: Gets or sets the provisioning state. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
- :param recurrence_interval: Required. Recurrence Interval. Possible values include: 'Hour',
- 'Day'.
+ :param recurrence_interval: Required. Recurrence Interval. Possible values include: "Hour",
+ "Day".
:type recurrence_interval: str or ~data_share_management_client.models.RecurrenceInterval
:param synchronization_time: Required. Synchronization time.
:type synchronization_time: ~datetime.datetime
@@ -2834,6 +2963,7 @@ class ScheduledSynchronizationSetting(SynchronizationSetting):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'created_at': {'readonly': True},
@@ -2846,6 +2976,7 @@ class ScheduledSynchronizationSetting(SynchronizationSetting):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
@@ -2863,7 +2994,7 @@ def __init__(
**kwargs
):
super(ScheduledSynchronizationSetting, self).__init__(**kwargs)
- self.kind = 'ScheduleBased'
+ self.kind = 'ScheduleBased' # type: str
self.created_at = None
self.provisioning_state = None
self.recurrence_interval = recurrence_interval
@@ -2885,18 +3016,19 @@ class Trigger(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ :param kind: Required. Kind of synchronization on trigger.Constant filled by server. Possible
+ values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.TriggerKind
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
}
@@ -2904,6 +3036,7 @@ class Trigger(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
}
@@ -2917,7 +3050,7 @@ def __init__(
**kwargs
):
super(Trigger, self).__init__(**kwargs)
- self.kind = 'Trigger'
+ self.kind = 'Trigger' # type: str
class ScheduledTrigger(Trigger):
@@ -2931,28 +3064,28 @@ class ScheduledTrigger(Trigger):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
- :param kind: Required. Kind of synchronization.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ :param kind: Required. Kind of synchronization on trigger.Constant filled by server. Possible
+ values include: "ScheduleBased".
+ :type kind: str or ~data_share_management_client.models.TriggerKind
:ivar created_at: Time at which the trigger was created.
:vartype created_at: ~datetime.datetime
- :ivar provisioning_state: Gets the provisioning state. Possible values include: 'Succeeded',
- 'Creating', 'Deleting', 'Moving', 'Failed'.
+ :ivar provisioning_state: Gets the provisioning state. Possible values include: "Succeeded",
+ "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
- :param recurrence_interval: Required. Recurrence Interval. Possible values include: 'Hour',
- 'Day'.
+ :param recurrence_interval: Required. Recurrence Interval. Possible values include: "Hour",
+ "Day".
:type recurrence_interval: str or ~data_share_management_client.models.RecurrenceInterval
- :param synchronization_mode: Synchronization mode. Possible values include: 'Incremental',
- 'FullSync'.
+ :param synchronization_mode: Synchronization mode. Possible values include: "Incremental",
+ "FullSync".
:type synchronization_mode: str or ~data_share_management_client.models.SynchronizationMode
:param synchronization_time: Required. Synchronization time.
:type synchronization_time: ~datetime.datetime
- :ivar trigger_status: Gets the trigger state. Possible values include: 'Active', 'Inactive',
- 'SourceSynchronizationSettingDeleted'.
+ :ivar trigger_status: Gets the trigger state. Possible values include: "Active", "Inactive",
+ "SourceSynchronizationSettingDeleted".
:vartype trigger_status: str or ~data_share_management_client.models.TriggerStatus
:ivar user_name: Name of the user who created the trigger.
:vartype user_name: str
@@ -2961,6 +3094,7 @@ class ScheduledTrigger(Trigger):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'created_at': {'readonly': True},
@@ -2974,6 +3108,7 @@ class ScheduledTrigger(Trigger):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
@@ -2994,7 +3129,7 @@ def __init__(
**kwargs
):
super(ScheduledTrigger, self).__init__(**kwargs)
- self.kind = 'ScheduleBased'
+ self.kind = 'ScheduleBased' # type: str
self.created_at = None
self.provisioning_state = None
self.recurrence_interval = recurrence_interval
@@ -3013,6 +3148,8 @@ class Share(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:ivar created_at: Time at which the share was created.
@@ -3020,9 +3157,9 @@ class Share(ProxyDto):
:param description: Share description.
:type description: str
:ivar provisioning_state: Gets or sets the provisioning state. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
- :param share_kind: Share kind. Possible values include: 'CopyBased', 'InPlace'.
+ :param share_kind: Share kind. Possible values include: "CopyBased", "InPlace".
:type share_kind: str or ~data_share_management_client.models.ShareKind
:param terms: Share terms.
:type terms: str
@@ -3035,6 +3172,7 @@ class Share(ProxyDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'created_at': {'readonly': True},
'provisioning_state': {'readonly': True},
@@ -3045,6 +3183,7 @@ class Share(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
'description': {'key': 'properties.description', 'type': 'str'},
@@ -3116,10 +3255,14 @@ class ShareSubscription(ProxyDto):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:ivar created_at: Time at which the share subscription was created.
:vartype created_at: ~datetime.datetime
+ :param expiration_date: The expiration date of the share subscription.
+ :type expiration_date: ~datetime.datetime
:param invitation_id: Required. The invitation id.
:type invitation_id: str
:ivar provider_email: Email of the provider who created the resource.
@@ -3129,16 +3272,16 @@ class ShareSubscription(ProxyDto):
:ivar provider_tenant_name: Tenant name of the provider who created the resource.
:vartype provider_tenant_name: str
:ivar provisioning_state: Provisioning state of the share subscription. Possible values
- include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ include: "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:ivar share_description: Description of share.
:vartype share_description: str
- :ivar share_kind: Kind of share. Possible values include: 'CopyBased', 'InPlace'.
+ :ivar share_kind: Kind of share. Possible values include: "CopyBased", "InPlace".
:vartype share_kind: str or ~data_share_management_client.models.ShareKind
:ivar share_name: Name of the share.
:vartype share_name: str
:ivar share_subscription_status: Gets the current status of share subscription. Possible values
- include: 'Active', 'Revoked', 'SourceDeleted', 'Revoking'.
+ include: "Active", "Revoked", "SourceDeleted", "Revoking".
:vartype share_subscription_status: str or
~data_share_management_client.models.ShareSubscriptionStatus
:ivar share_terms: Terms of a share.
@@ -3154,6 +3297,7 @@ class ShareSubscription(ProxyDto):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'created_at': {'readonly': True},
'invitation_id': {'required': True},
@@ -3174,8 +3318,10 @@ class ShareSubscription(ProxyDto):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
+ 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'},
'invitation_id': {'key': 'properties.invitationId', 'type': 'str'},
'provider_email': {'key': 'properties.providerEmail', 'type': 'str'},
'provider_name': {'key': 'properties.providerName', 'type': 'str'},
@@ -3196,10 +3342,12 @@ def __init__(
*,
invitation_id: str,
source_share_location: str,
+ expiration_date: Optional[datetime.datetime] = None,
**kwargs
):
super(ShareSubscription, self).__init__(**kwargs)
self.created_at = None
+ self.expiration_date = expiration_date
self.invitation_id = invitation_id
self.provider_email = None
self.provider_name = None
@@ -3266,8 +3414,8 @@ class ShareSubscriptionSynchronization(msrest.serialization.Model):
:vartype status: str
:param synchronization_id: Required. Synchronization id.
:type synchronization_id: str
- :ivar synchronization_mode: Synchronization Mode. Possible values include: 'Incremental',
- 'FullSync'.
+ :ivar synchronization_mode: Synchronization Mode. Possible values include: "Incremental",
+ "FullSync".
:vartype synchronization_mode: str or ~data_share_management_client.models.SynchronizationMode
"""
@@ -3362,8 +3510,8 @@ class ShareSynchronization(msrest.serialization.Model):
:type status: str
:param synchronization_id: Synchronization id.
:type synchronization_id: str
- :ivar synchronization_mode: Synchronization mode. Possible values include: 'Incremental',
- 'FullSync'.
+ :ivar synchronization_mode: Synchronization mode. Possible values include: "Incremental",
+ "FullSync".
:vartype synchronization_mode: str or ~data_share_management_client.models.SynchronizationMode
"""
@@ -3475,7 +3623,7 @@ def __init__(
self.value = value
-class SqlDBTableDataSet(DataSet):
+class SqlDbTableDataSet(DataSet):
"""A SQL DB table data set.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -3486,13 +3634,15 @@ class SqlDBTableDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:param database_name: Database name of the source data set.
:type database_name: str
:ivar data_set_id: Unique id for identifying a data set resource.
@@ -3508,6 +3658,7 @@ class SqlDBTableDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -3516,6 +3667,7 @@ class SqlDBTableDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'database_name': {'key': 'properties.databaseName', 'type': 'str'},
@@ -3534,8 +3686,8 @@ def __init__(
table_name: Optional[str] = None,
**kwargs
):
- super(SqlDBTableDataSet, self).__init__(**kwargs)
- self.kind = 'SqlDBTable'
+ super(SqlDbTableDataSet, self).__init__(**kwargs)
+ self.kind = 'SqlDBTable' # type: str
self.database_name = database_name
self.data_set_id = None
self.schema_name = schema_name
@@ -3543,7 +3695,7 @@ def __init__(
self.table_name = table_name
-class SqlDBTableDataSetMapping(DataSetMapping):
+class SqlDbTableDataSetMapping(DataSetMapping):
"""A SQL DB Table data set mapping.
Variables are only populated by the server, and will be ignored when sending a request.
@@ -3554,23 +3706,25 @@ class SqlDBTableDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param database_name: Required. DatabaseName name of the sink data set.
:type database_name: str
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param schema_name: Required. Schema of the table. Default value is dbo.
:type schema_name: str
@@ -3583,6 +3737,7 @@ class SqlDBTableDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'database_name': {'required': True},
@@ -3597,6 +3752,7 @@ class SqlDBTableDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'database_name': {'key': 'properties.databaseName', 'type': 'str'},
@@ -3618,8 +3774,8 @@ def __init__(
table_name: str,
**kwargs
):
- super(SqlDBTableDataSetMapping, self).__init__(**kwargs)
- self.kind = 'SqlDBTable'
+ super(SqlDbTableDataSetMapping, self).__init__(**kwargs)
+ self.kind = 'SqlDBTable' # type: str
self.database_name = database_name
self.data_set_id = data_set_id
self.data_set_mapping_status = None
@@ -3640,13 +3796,15 @@ class SqlDwTableDataSet(DataSet):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set.Constant filled by server. Possible values include:
- 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File',
- 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable',
- 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
:ivar data_set_id: Unique id for identifying a data set resource.
:vartype data_set_id: str
:param data_warehouse_name: DataWarehouse name of the source data set.
@@ -3662,6 +3820,7 @@ class SqlDwTableDataSet(DataSet):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'readonly': True},
@@ -3670,6 +3829,7 @@ class SqlDwTableDataSet(DataSet):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -3689,7 +3849,7 @@ def __init__(
**kwargs
):
super(SqlDwTableDataSet, self).__init__(**kwargs)
- self.kind = 'SqlDWTable'
+ self.kind = 'SqlDWTable' # type: str
self.data_set_id = None
self.data_warehouse_name = data_warehouse_name
self.schema_name = schema_name
@@ -3708,23 +3868,25 @@ class SqlDwTableDataSetMapping(DataSetMapping):
:vartype id: str
:ivar name: Name of the azure resource.
:vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
:ivar type: Type of the azure resource.
:vartype type: str
:param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
- include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder',
- 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase',
- 'SqlDBTable', 'SqlDWTable', 'ScheduleBased'.
- :type kind: str or ~data_share_management_client.models.Kind
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
:param data_set_id: Required. The id of the source data set.
:type data_set_id: str
:ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
- include: 'Ok', 'Broken'.
+ include: "Ok", "Broken".
:vartype data_set_mapping_status: str or
~data_share_management_client.models.DataSetMappingStatus
:param data_warehouse_name: Required. DataWarehouse name of the source data set.
:type data_warehouse_name: str
:ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
- 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed'.
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
:vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
:param schema_name: Required. Schema of the table. Default value is dbo.
:type schema_name: str
@@ -3737,6 +3899,7 @@ class SqlDwTableDataSetMapping(DataSetMapping):
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
+ 'system_data': {'readonly': True},
'type': {'readonly': True},
'kind': {'required': True},
'data_set_id': {'required': True},
@@ -3751,6 +3914,7 @@ class SqlDwTableDataSetMapping(DataSetMapping):
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
'type': {'key': 'type', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
@@ -3773,7 +3937,7 @@ def __init__(
**kwargs
):
super(SqlDwTableDataSetMapping, self).__init__(**kwargs)
- self.kind = 'SqlDWTable'
+ self.kind = 'SqlDWTable' # type: str
self.data_set_id = data_set_id
self.data_set_mapping_status = None
self.data_warehouse_name = data_warehouse_name
@@ -3783,6 +3947,138 @@ def __init__(
self.table_name = table_name
+class SynapseWorkspaceSqlPoolTableDataSet(DataSet):
+ """A Synapse Workspace Sql Pool Table data set.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: The resource id of the azure resource.
+ :vartype id: str
+ :ivar name: Name of the azure resource.
+ :vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
+ :ivar type: Type of the azure resource.
+ :vartype type: str
+ :param kind: Required. Kind of data set.Constant filled by server. Possible values include:
+ "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File",
+ "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetKind
+ :ivar data_set_id: Unique id for identifying a data set resource.
+ :vartype data_set_id: str
+ :param synapse_workspace_sql_pool_table_resource_id: Required. Resource id of the Synapse
+ Workspace SQL Pool Table.
+ :type synapse_workspace_sql_pool_table_resource_id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'type': {'readonly': True},
+ 'kind': {'required': True},
+ 'data_set_id': {'readonly': True},
+ 'synapse_workspace_sql_pool_table_resource_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'kind': {'key': 'kind', 'type': 'str'},
+ 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
+ 'synapse_workspace_sql_pool_table_resource_id': {'key': 'properties.synapseWorkspaceSqlPoolTableResourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ synapse_workspace_sql_pool_table_resource_id: str,
+ **kwargs
+ ):
+ super(SynapseWorkspaceSqlPoolTableDataSet, self).__init__(**kwargs)
+ self.kind = 'SynapseWorkspaceSqlPoolTable' # type: str
+ self.data_set_id = None
+ self.synapse_workspace_sql_pool_table_resource_id = synapse_workspace_sql_pool_table_resource_id
+
+
+class SynapseWorkspaceSqlPoolTableDataSetMapping(DataSetMapping):
+ """A Synapse Workspace Sql Pool Table data set mapping.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: The resource id of the azure resource.
+ :vartype id: str
+ :ivar name: Name of the azure resource.
+ :vartype name: str
+ :ivar system_data: System Data of the Azure resource.
+ :vartype system_data: ~data_share_management_client.models.SystemData
+ :ivar type: Type of the azure resource.
+ :vartype type: str
+ :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values
+ include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder",
+ "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
+ :type kind: str or ~data_share_management_client.models.DataSetMappingKind
+ :param data_set_id: Required. The id of the source data set.
+ :type data_set_id: str
+ :ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values
+ include: "Ok", "Broken".
+ :vartype data_set_mapping_status: str or
+ ~data_share_management_client.models.DataSetMappingStatus
+ :ivar provisioning_state: Provisioning state of the data set mapping. Possible values include:
+ "Succeeded", "Creating", "Deleting", "Moving", "Failed".
+ :vartype provisioning_state: str or ~data_share_management_client.models.ProvisioningState
+ :param synapse_workspace_sql_pool_table_resource_id: Required. Resource id of the Synapse
+ Workspace SQL Pool Table.
+ :type synapse_workspace_sql_pool_table_resource_id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'type': {'readonly': True},
+ 'kind': {'required': True},
+ 'data_set_id': {'required': True},
+ 'data_set_mapping_status': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'synapse_workspace_sql_pool_table_resource_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'kind': {'key': 'kind', 'type': 'str'},
+ 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'},
+ 'data_set_mapping_status': {'key': 'properties.dataSetMappingStatus', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'synapse_workspace_sql_pool_table_resource_id': {'key': 'properties.synapseWorkspaceSqlPoolTableResourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ data_set_id: str,
+ synapse_workspace_sql_pool_table_resource_id: str,
+ **kwargs
+ ):
+ super(SynapseWorkspaceSqlPoolTableDataSetMapping, self).__init__(**kwargs)
+ self.kind = 'SynapseWorkspaceSqlPoolTable' # type: str
+ self.data_set_id = data_set_id
+ self.data_set_mapping_status = None
+ self.provisioning_state = None
+ self.synapse_workspace_sql_pool_table_resource_id = synapse_workspace_sql_pool_table_resource_id
+
+
class SynchronizationDetails(msrest.serialization.Model):
"""Synchronization details at data set level.
@@ -3790,9 +4086,10 @@ class SynchronizationDetails(msrest.serialization.Model):
:ivar data_set_id: Id of data set.
:vartype data_set_id: str
- :ivar data_set_type: Type of the data set. Possible values include: 'Blob', 'Container',
- 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File', 'AdlsGen1Folder',
- 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SqlDBTable', 'SqlDWTable'.
+ :ivar data_set_type: Type of the data set. Possible values include: "Blob", "Container",
+ "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder",
+ "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable",
+ "SynapseWorkspaceSqlPoolTable".
:vartype data_set_type: str or ~data_share_management_client.models.DataSetType
:ivar duration_ms: Duration of data set level copy.
:vartype duration_ms: int
@@ -3948,7 +4245,7 @@ class Synchronize(msrest.serialization.Model):
"""Payload for the synchronizing the data.
:param synchronization_mode: Mode of synchronization used in triggers and snapshot sync.
- Incremental by default. Possible values include: 'Incremental', 'FullSync'.
+ Incremental by default. Possible values include: "Incremental", "FullSync".
:type synchronization_mode: str or ~data_share_management_client.models.SynchronizationMode
"""
@@ -3966,6 +4263,54 @@ def __init__(
self.synchronization_mode = synchronization_mode
+class SystemData(msrest.serialization.Model):
+ """Metadata pertaining to creation and last modification of the resource.
+
+ :param created_at: The timestamp of resource creation (UTC).
+ :type created_at: ~datetime.datetime
+ :param created_by: The identity that created the resource.
+ :type created_by: str
+ :param created_by_type: The type of identity that created the resource. Possible values
+ include: "User", "Application", "ManagedIdentity", "Key".
+ :type created_by_type: str or ~data_share_management_client.models.CreatedByType
+ :param last_modified_at: The timestamp of resource last modification (UTC).
+ :type last_modified_at: ~datetime.datetime
+ :param last_modified_by: The identity that last modified the resource.
+ :type last_modified_by: str
+ :param last_modified_by_type: The type of identity that last modified the resource. Possible
+ values include: "User", "Application", "ManagedIdentity", "Key".
+ :type last_modified_by_type: str or ~data_share_management_client.models.LastModifiedByType
+ """
+
+ _attribute_map = {
+ 'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
+ 'created_by': {'key': 'createdBy', 'type': 'str'},
+ 'created_by_type': {'key': 'createdByType', 'type': 'str'},
+ 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
+ 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
+ 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ created_at: Optional[datetime.datetime] = None,
+ created_by: Optional[str] = None,
+ created_by_type: Optional[Union[str, "CreatedByType"]] = None,
+ last_modified_at: Optional[datetime.datetime] = None,
+ last_modified_by: Optional[str] = None,
+ last_modified_by_type: Optional[Union[str, "LastModifiedByType"]] = None,
+ **kwargs
+ ):
+ super(SystemData, self).__init__(**kwargs)
+ self.created_at = created_at
+ self.created_by = created_by
+ self.created_by_type = created_by_type
+ self.last_modified_at = last_modified_at
+ self.last_modified_by = last_modified_by
+ self.last_modified_by_type = last_modified_by_type
+
+
class TriggerList(msrest.serialization.Model):
"""List response for get triggers.
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/__init__.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/__init__.py
index 72ad85b0b3c..7cd147304b9 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/__init__.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/__init__.py
@@ -6,30 +6,30 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from ._account_operations import AccountOperations
-from ._consumer_invitation_operations import ConsumerInvitationOperations
-from ._data_set_operations import DataSetOperations
-from ._data_set_mapping_operations import DataSetMappingOperations
-from ._invitation_operations import InvitationOperations
-from ._operation_operations import OperationOperations
-from ._share_operations import ShareOperations
-from ._provider_share_subscription_operations import ProviderShareSubscriptionOperations
-from ._share_subscription_operations import ShareSubscriptionOperations
-from ._consumer_source_data_set_operations import ConsumerSourceDataSetOperations
-from ._synchronization_setting_operations import SynchronizationSettingOperations
-from ._trigger_operations import TriggerOperations
+from ._accounts_operations import AccountsOperations
+from ._consumer_invitations_operations import ConsumerInvitationsOperations
+from ._data_sets_operations import DataSetsOperations
+from ._data_set_mappings_operations import DataSetMappingsOperations
+from ._invitations_operations import InvitationsOperations
+from ._operations import Operations
+from ._shares_operations import SharesOperations
+from ._provider_share_subscriptions_operations import ProviderShareSubscriptionsOperations
+from ._share_subscriptions_operations import ShareSubscriptionsOperations
+from ._consumer_source_data_sets_operations import ConsumerSourceDataSetsOperations
+from ._synchronization_settings_operations import SynchronizationSettingsOperations
+from ._triggers_operations import TriggersOperations
__all__ = [
- 'AccountOperations',
- 'ConsumerInvitationOperations',
- 'DataSetOperations',
- 'DataSetMappingOperations',
- 'InvitationOperations',
- 'OperationOperations',
- 'ShareOperations',
- 'ProviderShareSubscriptionOperations',
- 'ShareSubscriptionOperations',
- 'ConsumerSourceDataSetOperations',
- 'SynchronizationSettingOperations',
- 'TriggerOperations',
+ 'AccountsOperations',
+ 'ConsumerInvitationsOperations',
+ 'DataSetsOperations',
+ 'DataSetMappingsOperations',
+ 'InvitationsOperations',
+ 'Operations',
+ 'SharesOperations',
+ 'ProviderShareSubscriptionsOperations',
+ 'ShareSubscriptionsOperations',
+ 'ConsumerSourceDataSetsOperations',
+ 'SynchronizationSettingsOperations',
+ 'TriggersOperations',
]
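The operation-group classes move from singular to plural names, so any hand-written code that imports them directly has to switch to the new identifiers; a small sketch of the adjusted import, using the package path shown in the rename above:

# Previously: from azext_datashare.vendored_sdks.datashare.operations import AccountOperations
from azext_datashare.vendored_sdks.datashare.operations import (
    AccountsOperations,
    SharesOperations,
    TriggersOperations,
)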
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_account_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_accounts_operations.py
similarity index 62%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_account_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_accounts_operations.py
index 2b06a214c51..b7bcc28f32a 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_account_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_accounts_operations.py
@@ -5,22 +5,28 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-class AccountOperations(object):
- """AccountOperations operations.
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class AccountsOperations(object):
+ """AccountsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -41,6 +47,81 @@ def __init__(self, client, config, serializer, deserializer):
self._deserialize = deserializer
self._config = config
+ def list_by_subscription(
+ self,
+ skip_token=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.AccountList"]
+ """List Accounts in a subscription.
+
+ List Accounts in Subscription.
+
+ :param skip_token: Continuation token.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either AccountList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.AccountList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.AccountList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_subscription.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('AccountList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataShare/accounts'} # type: ignore
+
def get(
self,
resource_group_name, # type: str
@@ -48,25 +129,29 @@ def get(
**kwargs # type: Any
):
# type: (...) -> "models.Account"
- """Get an account.
+ """Get an account under a resource group.
- Get an account under a resource group.
+ Get an account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Account or the result of cls(response)
+ :return: Account, or the result of cls(response)
:rtype: ~data_share_management_client.models.Account
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Account"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -80,9 +165,8 @@ def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -90,35 +174,35 @@ def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Account', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
def _create_initial(
self,
resource_group_name, # type: str
account_name, # type: str
- identity, # type: "models.Identity"
- location=None, # type: Optional[str]
- tags=None, # type: Optional[Dict[str, str]]
+ account, # type: "models.Account"
**kwargs # type: Any
):
# type: (...) -> "models.Account"
cls = kwargs.pop('cls', None) # type: ClsType["models.Account"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _account = models.Account(location=location, tags=tags, identity=identity)
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self._create_initial.metadata['url']
+ url = self._create_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -133,23 +217,20 @@ def _create_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_account, 'Account')
+ body_content = self._serialize.body(account, 'Account')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Account', pipeline_response)
@@ -157,55 +238,57 @@ def _create_initial(
deserialized = self._deserialize('Account', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
+ _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
def begin_create(
self,
resource_group_name, # type: str
account_name, # type: str
- identity, # type: "models.Identity"
- location=None, # type: Optional[str]
- tags=None, # type: Optional[Dict[str, str]]
+ account, # type: "models.Account"
**kwargs # type: Any
):
- # type: (...) -> "models.Account"
- """Create an account.
+ # type: (...) -> LROPoller["models.Account"]
+ """Create an account in the given resource group.
- Create an account in the given resource group.
+ Create an account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
:type account_name: str
- :param identity: Identity Info on the Account.
- :type identity: ~data_share_management_client.models.Identity
- :param location: Location of the azure resource.
- :type location: str
- :param tags: Tags on the azure resource.
- :type tags: dict[str, str]
+ :param account: The account payload.
+ :type account: ~data_share_management_client.models.Account
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
- :return: An instance of LROPoller that returns Account
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either Account or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.Account]
-
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.Account"]
- raw_result = self._create_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- identity=identity,
- location=location,
- tags=tags,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._create_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ account=account,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('Account', pipeline_response)
@@ -214,15 +297,25 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
def _delete_initial(
self,
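A hedged usage sketch of the reworked AccountsOperations surface shown above: list_by_subscription returns an ItemPaged, and begin_create now takes the whole Account payload and defaults to ARM polling. The accounts attribute name on the client and the Account/Identity constructor keywords are assumptions (the removed wrapper above passed identity, location and tags); they are not confirmed by this hunk.

# 'client' is a DataShareManagementClient created elsewhere with credentials and a subscription id.
for account in client.accounts.list_by_subscription():  # 'accounts' attribute name assumed
    print(account.name)

account_payload = models.Account(
    location="eastus2",                               # keywords assumed from the removed wrapper
    identity=models.Identity(type="SystemAssigned"),  # Identity signature assumed
)
poller = client.accounts.begin_create(
    resource_group_name="my-rg",
    account_name="my-datashare-account",
    account=account_payload,
)
created = poller.result()  # ARM polling is the default now, so this blocks until provisioning completes

# The new continuation_token keyword lets a poller be rebuilt from a saved token.
token = poller.continuation_token()
resumed = client.accounts.begin_create(
    resource_group_name="my-rg",
    account_name="my-datashare-account",
    account=account_payload,
    continuation_token=token,
)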
@@ -230,13 +323,17 @@ def _delete_initial(
account_name, # type: str
**kwargs # type: Any
):
- # type: (...) -> "models.OperationResponse"
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ # type: (...) -> Optional["models.OperationResponse"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OperationResponse"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._delete_initial.metadata['url']
+ url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -250,9 +347,8 @@ def _delete_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -260,17 +356,17 @@ def _delete_initial(
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
def begin_delete(
self,
@@ -278,32 +374,42 @@ def begin_delete(
account_name, # type: str
**kwargs # type: Any
):
- # type: (...) -> "models.OperationResponse"
- """DeleteAccount.
+ # type: (...) -> LROPoller["models.OperationResponse"]
+ """Delete an account.
- Delete an account.
+ DeleteAccount.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
- :return: An instance of LROPoller that returns OperationResponse
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either OperationResponse or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.OperationResponse]
-
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- raw_result = self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('OperationResponse', pipeline_response)
@@ -312,48 +418,60 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
def update(
self,
resource_group_name, # type: str
account_name, # type: str
- tags=None, # type: Optional[Dict[str, str]]
+ account_update_parameters, # type: "models.AccountUpdateParameters"
**kwargs # type: Any
):
# type: (...) -> "models.Account"
- """Patch an account.
+ """Patch a given account.
- Patch a given account.
+ Patch an account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
:type account_name: str
- :param tags: Tags on the azure resource.
- :type tags: dict[str, str]
+ :param account_update_parameters: The account update parameters.
+ :type account_update_parameters: ~data_share_management_client.models.AccountUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Account or the result of cls(response)
+ :return: Account, or the result of cls(response)
:rtype: ~data_share_management_client.models.Account
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Account"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _account_update_parameters = models.AccountUpdateParameters(tags=tags)
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.update.metadata['url']
+ url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -368,100 +486,27 @@ def update(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_account_update_parameters, 'AccountUpdateParameters')
+ body_content = self._serialize.body(account_update_parameters, 'AccountUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Account', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'}
-
- def list_by_subscription(
- self,
- skip_token=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.AccountList"
- """List Accounts in Subscription.
-
- List Accounts in a subscription.
-
- :param skip_token: Continuation token.
- :type skip_token: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: AccountList or the result of cls(response)
- :rtype: ~data_share_management_client.models.AccountList
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.AccountList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_by_subscription.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('AccountList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataShare/accounts'}
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}'} # type: ignore
def list_by_resource_group(
self,
@@ -469,48 +514,52 @@ def list_by_resource_group(
skip_token=None, # type: Optional[str]
**kwargs # type: Any
):
- # type: (...) -> "models.AccountList"
- """List Accounts in ResourceGroup.
+ # type: (...) -> Iterable["models.AccountList"]
+ """List Accounts in a resource group.
- List Accounts in a resource group.
+ List Accounts in ResourceGroup.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param skip_token: Continuation token.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: AccountList or the result of cls(response)
- :rtype: ~data_share_management_client.models.AccountList
+ :return: An iterator-like instance of either AccountList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.AccountList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.AccountList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_resource_group.metadata['url']
+ url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
@@ -529,11 +578,11 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts'}
+ list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts'} # type: ignore
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_invitation_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_invitations_operations.py
similarity index 65%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_invitation_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_invitations_operations.py
index 61235cd44a9..683df41932b 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_invitation_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_invitations_operations.py
@@ -5,21 +5,26 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-class ConsumerInvitationOperations(object):
- """ConsumerInvitationOperations operations.
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ConsumerInvitationsOperations(object):
+ """ConsumerInvitationsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -40,37 +45,110 @@ def __init__(self, client, config, serializer, deserializer):
self._deserialize = deserializer
self._config = config
- def reject_invitation(
+ def list_invitations(
+ self,
+ skip_token=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ConsumerInvitationList"]
+ """List the invitations.
+
+ Lists invitations.
+
+ :param skip_token: The continuation token.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator-like instance of either ConsumerInvitationList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.ConsumerInvitationList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ConsumerInvitationList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_invitations.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ConsumerInvitationList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_invitations.metadata = {'url': '/providers/Microsoft.DataShare/listInvitations'} # type: ignore
+
+ def get(
self,
location, # type: str
invitation_id, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ConsumerInvitation"
- """Reject an invitation.
+ """Gets the invitation identified by invitationId.
- Rejects the invitation identified by invitationId.
+ Get an invitation.
:param location: Location of the invitation.
:type location: str
- :param invitation_id: Unique id of the invitation.
+ :param invitation_id: An invitation id.
:type invitation_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ConsumerInvitation or the result of cls(response)
+ :return: ConsumerInvitation, or the result of cls(response)
:rtype: ~data_share_management_client.models.ConsumerInvitation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ConsumerInvitation"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _invitation = models.ConsumerInvitation(invitation_id=invitation_id)
- api_version = "2019-11-01"
- content_type = kwargs.pop("content_type", "application/json")
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.reject_invitation.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
+ 'invitationId': self._serialize.url("invitation_id", invitation_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
@@ -80,60 +158,58 @@ def reject_invitation(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_invitation, 'ConsumerInvitation')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+ request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ConsumerInvitation', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- reject_invitation.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/RejectInvitation'}
+ get.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/consumerInvitations/{invitationId}'} # type: ignore
- def get(
+ def reject_invitation(
self,
location, # type: str
- invitation_id, # type: str
+ invitation, # type: "models.ConsumerInvitation"
**kwargs # type: Any
):
# type: (...) -> "models.ConsumerInvitation"
- """Get an invitation.
+ """Rejects the invitation identified by invitationId.
- Gets the invitation identified by invitationId.
+ Reject an invitation.
:param location: Location of the invitation.
:type location: str
- :param invitation_id: An invitation id.
- :type invitation_id: str
+ :param invitation: An invitation payload.
+ :type invitation: ~data_share_management_client.models.ConsumerInvitation
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ConsumerInvitation or the result of cls(response)
+ :return: ConsumerInvitation, or the result of cls(response)
:rtype: ~data_share_management_client.models.ConsumerInvitation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ConsumerInvitation"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.reject_invitation.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
- 'invitationId': self._serialize.url("invitation_id", invitation_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
@@ -143,89 +219,25 @@ def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(invitation, 'ConsumerInvitation')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ConsumerInvitation', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/consumerInvitations/{invitationId}'}
-
- def list_invitation(
- self,
- skip_token=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ConsumerInvitationList"
- """Lists invitations.
-
- List the invitations.
-
- :param skip_token: The continuation token.
- :type skip_token: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ConsumerInvitationList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ConsumerInvitationList
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ConsumerInvitationList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_invitation.metadata['url']
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('ConsumerInvitationList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_invitation.metadata = {'url': '/providers/Microsoft.DataShare/ListInvitations'}
+ reject_invitation.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/rejectInvitation'} # type: ignore
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_source_data_set_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_source_data_sets_operations.py
similarity index 69%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_source_data_set_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_source_data_sets_operations.py
index f45b8832698..d0b2f92dc01 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_source_data_set_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_consumer_source_data_sets_operations.py
@@ -5,21 +5,26 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-class ConsumerSourceDataSetOperations(object):
- """ConsumerSourceDataSetOperations operations.
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ConsumerSourceDataSetsOperations(object):
+ """ConsumerSourceDataSetsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -48,7 +53,7 @@ def list_by_share_subscription(
skip_token=None, # type: Optional[str]
**kwargs # type: Any
):
- # type: (...) -> "models.ConsumerSourceDataSetList"
+ # type: (...) -> Iterable["models.ConsumerSourceDataSetList"]
"""Get source dataSets of a shareSubscription.
Get source dataSets of a shareSubscription.
@@ -62,18 +67,26 @@ def list_by_share_subscription(
:param skip_token: Continuation token.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ConsumerSourceDataSetList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ConsumerSourceDataSetList
+ :return: An iterator-like instance of either ConsumerSourceDataSetList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.ConsumerSourceDataSetList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ConsumerSourceDataSetList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share_subscription.metadata['url']
+ url = self.list_by_share_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -81,21 +94,17 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
@@ -114,11 +123,11 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/ConsumerSourceDataSets'}
+ list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/consumerSourceDataSets'} # type: ignore
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_set_mapping_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_set_mappings_operations.py
similarity index 74%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_set_mapping_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_set_mappings_operations.py
index be59ce3d9b9..49bfc88f135 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_set_mapping_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_set_mappings_operations.py
@@ -5,21 +5,26 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-class DataSetMappingOperations(object):
- """DataSetMappingOperations operations.
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class DataSetMappingsOperations(object):
+ """DataSetMappingsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -49,9 +54,9 @@ def get(
**kwargs # type: Any
):
# type: (...) -> "models.DataSetMapping"
- """Get a DataSetMapping in a shareSubscription.
+ """Get DataSetMapping in a shareSubscription.
- Get DataSetMapping in a shareSubscription.
+ Get a DataSetMapping in a shareSubscription.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -62,16 +67,20 @@ def get(
:param data_set_mapping_name: The name of the dataSetMapping.
:type data_set_mapping_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSetMapping or the result of cls(response)
+ :return: DataSetMapping, or the result of cls(response)
:rtype: ~data_share_management_client.models.DataSetMapping
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSetMapping"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -87,9 +96,8 @@ def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -97,15 +105,15 @@ def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('DataSetMapping', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'} # type: ignore
def create(
self,
@@ -117,11 +125,11 @@ def create(
**kwargs # type: Any
):
# type: (...) -> "models.DataSetMapping"
- """Create a DataSetMapping.
-
- Maps a source data set in the source share to a sink data set in the share subscription.
+ """Maps a source data set in the source share to a sink data set in the share subscription.
Enables copying the data set from source to destination.
+ Create a DataSetMapping.
+
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
@@ -134,17 +142,21 @@ def create(
:param data_set_mapping: Destination data set configuration details.
:type data_set_mapping: ~data_share_management_client.models.DataSetMapping
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSetMapping or the result of cls(response)
- :rtype: ~data_share_management_client.models.DataSetMapping or ~data_share_management_client.models.DataSetMapping
+ :return: DataSetMapping, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.DataSetMapping
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSetMapping"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -161,23 +173,20 @@ def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(data_set_mapping, 'DataSetMapping')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DataSetMapping', pipeline_response)
@@ -185,10 +194,10 @@ def create(
deserialized = self._deserialize('DataSetMapping', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'}
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'} # type: ignore
def delete(
self,
@@ -199,9 +208,9 @@ def delete(
**kwargs # type: Any
):
# type: (...) -> None
- """Delete a DataSetMapping in a shareSubscription.
+ """Delete DataSetMapping in a shareSubscription.
- Delete DataSetMapping in a shareSubscription.
+ Delete a DataSetMapping in a shareSubscription.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -212,16 +221,20 @@ def delete(
:param data_set_mapping_name: The name of the dataSetMapping.
:type data_set_mapping_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: None or the result of cls(response)
+ :return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.delete.metadata['url']
+ url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -237,8 +250,8 @@ def delete(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -246,12 +259,12 @@ def delete(
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {})
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'}
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}'} # type: ignore
def list_by_share_subscription(
self,
@@ -259,9 +272,11 @@ def list_by_share_subscription(
account_name, # type: str
share_subscription_name, # type: str
skip_token=None, # type: Optional[str]
+ filter=None, # type: Optional[str]
+ orderby=None, # type: Optional[str]
**kwargs # type: Any
):
- # type: (...) -> "models.DataSetMappingList"
+ # type: (...) -> Iterable["models.DataSetMappingList"]
"""List DataSetMappings in a share subscription.
List DataSetMappings in a share subscription.
@@ -274,19 +289,31 @@ def list_by_share_subscription(
:type share_subscription_name: str
:param skip_token: Continuation token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSetMappingList or the result of cls(response)
- :rtype: ~data_share_management_client.models.DataSetMappingList
+ :return: An iterator-like instance of either DataSetMappingList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.DataSetMappingList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSetMappingList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share_subscription.metadata['url']
+ url = self.list_by_share_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -294,21 +321,21 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
@@ -327,11 +354,11 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings'}
+ list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings'} # type: ignore
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_set_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_sets_operations.py
similarity index 67%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_set_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_sets_operations.py
index 95929ba4858..f6e94b92dd3 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_set_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_data_sets_operations.py
@@ -5,22 +5,28 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-class DataSetOperations(object):
- """DataSetOperations operations.
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class DataSetsOperations(object):
+ """DataSetsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -50,9 +56,9 @@ def get(
**kwargs # type: Any
):
# type: (...) -> "models.DataSet"
- """Get a DataSet in a share.
+ """Get DataSet in a share.
- Get DataSet in a share.
+ Get a DataSet in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -63,16 +69,20 @@ def get(
:param data_set_name: The name of the dataSet.
:type data_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSet or the result of cls(response)
+ :return: DataSet, or the result of cls(response)
:rtype: ~data_share_management_client.models.DataSet
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSet"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -88,9 +98,8 @@ def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -98,15 +107,15 @@ def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('DataSet', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'} # type: ignore
def create(
self,
@@ -118,9 +127,9 @@ def create(
**kwargs # type: Any
):
# type: (...) -> "models.DataSet"
- """Create a DataSet.
+ """Adds a new data set to an existing share.
- Adds a new data set to an existing share.
+ Create a DataSet.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -133,17 +142,21 @@ def create(
:param data_set: The new data set information.
:type data_set: ~data_share_management_client.models.DataSet
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSet or the result of cls(response)
- :rtype: ~data_share_management_client.models.DataSet or ~data_share_management_client.models.DataSet
+ :return: DataSet, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.DataSet
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSet"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -160,23 +173,20 @@ def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(data_set, 'DataSet')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DataSet', pipeline_response)
@@ -184,10 +194,10 @@ def create(
deserialized = self._deserialize('DataSet', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'}
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'} # type: ignore
def _delete_initial(
self,
@@ -199,11 +209,15 @@ def _delete_initial(
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._delete_initial.metadata['url']
+ url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -219,8 +233,8 @@ def _delete_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -228,12 +242,12 @@ def _delete_initial(
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {})
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'} # type: ignore
def begin_delete(
self,
@@ -243,10 +257,10 @@ def begin_delete(
data_set_name, # type: str
**kwargs # type: Any
):
- # type: (...) -> None
- """Delete a DataSet in a share.
+ # type: (...) -> LROPoller[None]
+ """Delete DataSet in a share.
- Delete DataSet in a share.
+ Delete a DataSet in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -257,38 +271,60 @@ def begin_delete(
:param data_set_name: The name of the dataSet.
:type data_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
- :return: An instance of LROPoller that returns None
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
-
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
- raw_result = self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_name=share_name,
- data_set_name=data_set_name,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ data_set_name=data_set_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ 'dataSetName': self._serialize.url("data_set_name", data_set_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'}
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}'} # type: ignore
def list_by_share(
self,
@@ -296,9 +332,11 @@ def list_by_share(
account_name, # type: str
share_name, # type: str
skip_token=None, # type: Optional[str]
+ filter=None, # type: Optional[str]
+ orderby=None, # type: Optional[str]
**kwargs # type: Any
):
- # type: (...) -> "models.DataSetList"
+ # type: (...) -> Iterable["models.DataSetList"]
"""List DataSets in a share.
List DataSets in a share.
@@ -311,19 +349,31 @@ def list_by_share(
:type share_name: str
:param skip_token: continuation token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataSetList or the result of cls(response)
- :rtype: ~data_share_management_client.models.DataSetList
+ :return: An iterator like instance of either DataSetList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.DataSetList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DataSetList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share.metadata['url']
+ url = self.list_by_share.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -331,21 +381,21 @@ def prepare_request(next_link=None):
'shareName': self._serialize.url("share_name", share_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
@@ -364,11 +414,11 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets'}
+ list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets'} # type: ignore
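Editor's note (not part of the patch): the regenerated data set operations change two behaviors worth calling out. `begin_delete` now defaults to `polling=True` with `ARMPolling` and accepts a `continuation_token`, and `list_by_share` returns an `ItemPaged` iterator with optional OData `filter`/`orderby` parameters. A minimal usage sketch follows; it assumes an already-constructed `DataShareManagementClient` bound to `client`, that the operations group is exposed as `client.data_sets`, and illustrative resource names.

    # Sketch only; `client`, the `data_sets` attribute name, and all
    # resource names below are assumptions, not part of the patch.
    poller = client.data_sets.begin_delete(
        resource_group_name='MyResourceGroup',
        account_name='MyAccount',
        share_name='MyShare',
        data_set_name='MyDataSet',
    )
    poller.wait()  # ARMPolling is now the default (polling=True)

    # list_by_share now yields pages lazily and accepts OData options.
    for data_set in client.data_sets.list_by_share(
            'MyResourceGroup', 'MyAccount', 'MyShare',
            filter="name eq 'MyDataSet'",  # illustrative OData filter
            orderby='name'):
        print(data_set.name)
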
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_invitation_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_invitations_operations.py
similarity index 71%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_invitation_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_invitations_operations.py
index f6d2e38dc1a..5f6deebfe46 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_invitation_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_invitations_operations.py
@@ -5,21 +5,26 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-class InvitationOperations(object):
- """InvitationOperations operations.
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class InvitationsOperations(object):
+ """InvitationsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -49,9 +54,9 @@ def get(
**kwargs # type: Any
):
# type: (...) -> "models.Invitation"
- """Get an invitation in a share.
+ """Get Invitation in a share.
- Get Invitation in a share.
+ Get an invitation in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -62,16 +67,20 @@ def get(
:param invitation_name: The name of the invitation.
:type invitation_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Invitation or the result of cls(response)
+ :return: Invitation, or the result of cls(response)
:rtype: ~data_share_management_client.models.Invitation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Invitation"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -87,9 +96,8 @@ def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -97,15 +105,15 @@ def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Invitation', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'} # type: ignore
def create(
self,
@@ -113,15 +121,13 @@ def create(
account_name, # type: str
share_name, # type: str
invitation_name, # type: str
- target_active_directory_id=None, # type: Optional[str]
- target_email=None, # type: Optional[str]
- target_object_id=None, # type: Optional[str]
+ invitation, # type: "models.Invitation"
**kwargs # type: Any
):
# type: (...) -> "models.Invitation"
- """Create an invitation.
+ """Sends a new invitation to a recipient to access a share.
- Sends a new invitation to a recipient to access a share.
+ Create an invitation.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -131,28 +137,24 @@ def create(
:type share_name: str
:param invitation_name: The name of the invitation.
:type invitation_name: str
- :param target_active_directory_id: The target Azure AD Id. Can't be combined with email.
- :type target_active_directory_id: str
- :param target_email: The email the invitation is directed to.
- :type target_email: str
- :param target_object_id: The target user or application Id that invitation is being sent to.
- Must be specified along TargetActiveDirectoryId. This enables sending
- invitations to specific users or applications in an AD tenant.
- :type target_object_id: str
+ :param invitation: Invitation details.
+ :type invitation: ~data_share_management_client.models.Invitation
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Invitation or the result of cls(response)
- :rtype: ~data_share_management_client.models.Invitation or ~data_share_management_client.models.Invitation
+ :return: Invitation, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.Invitation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Invitation"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _invitation = models.Invitation(target_active_directory_id=target_active_directory_id, target_email=target_email, target_object_id=target_object_id)
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -169,23 +171,20 @@ def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_invitation, 'Invitation')
+ body_content = self._serialize.body(invitation, 'Invitation')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Invitation', pipeline_response)
@@ -193,10 +192,10 @@ def create(
deserialized = self._deserialize('Invitation', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'}
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'} # type: ignore
def delete(
self,
@@ -207,9 +206,9 @@ def delete(
**kwargs # type: Any
):
# type: (...) -> None
- """Delete an invitation in a share.
+ """Delete Invitation in a share.
- Delete Invitation in a share.
+ Delete an invitation in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -220,16 +219,20 @@ def delete(
:param invitation_name: The name of the invitation.
:type invitation_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: None or the result of cls(response)
+ :return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.delete.metadata['url']
+ url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -245,8 +248,8 @@ def delete(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -254,12 +257,12 @@ def delete(
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
- return cls(pipeline_response, None, {})
+ return cls(pipeline_response, None, {})
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'}
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}'} # type: ignore
def list_by_share(
self,
@@ -267,12 +270,14 @@ def list_by_share(
account_name, # type: str
share_name, # type: str
skip_token=None, # type: Optional[str]
+ filter=None, # type: Optional[str]
+ orderby=None, # type: Optional[str]
**kwargs # type: Any
):
- # type: (...) -> "models.InvitationList"
- """List invitations in a share.
+ # type: (...) -> Iterable["models.InvitationList"]
+ """List all Invitations in a share.
- List all Invitations in a share.
+ List invitations in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -282,19 +287,31 @@ def list_by_share(
:type share_name: str
:param skip_token: The continuation token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: InvitationList or the result of cls(response)
- :rtype: ~data_share_management_client.models.InvitationList
+ :return: An iterator like instance of either InvitationList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.InvitationList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.InvitationList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share.metadata['url']
+ url = self.list_by_share.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -302,21 +319,21 @@ def prepare_request(next_link=None):
'shareName': self._serialize.url("share_name", share_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
@@ -335,11 +352,11 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations'}
+ list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations'} # type: ignore
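Editor's note (not part of the patch): the regenerated invitations operations replace the flattened `target_active_directory_id` / `target_email` / `target_object_id` keyword arguments with a single `Invitation` body parameter. A brief sketch under the same client assumptions as above; the import path follows the vendored SDK layout shown in this diff, and the recipient address is illustrative.

    # Sketch only: create() now takes an Invitation model as the request body.
    from azext_datashare.vendored_sdks.datashare import models

    invitation = models.Invitation(target_email='recipient@contoso.com')  # illustrative recipient
    created = client.invitations.create(
        'MyResourceGroup', 'MyAccount', 'MyShare', 'MyInvitation',
        invitation,
    )
    # `created` is the deserialized Invitation returned by the service.
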
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_operation_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_operations.py
similarity index 62%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_operation_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_operations.py
index ea88e363ce7..2d19616cb95 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_operation_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_operations.py
@@ -5,21 +5,26 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-class OperationOperations(object):
- """OperationOperations operations.
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class Operations(object):
+ """Operations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -44,37 +49,41 @@ def list(
self,
**kwargs # type: Any
):
- # type: (...) -> "models.OperationList"
- """List of available operations.
+ # type: (...) -> Iterable["models.OperationList"]
+ """Lists the available operations.
- Lists the available operations.
+ List of available operations.
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: OperationList or the result of cls(response)
- :rtype: ~data_share_management_client.models.OperationList
+ :return: An iterator like instance of either OperationList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.OperationList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.OperationList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list.metadata['url']
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
@@ -93,11 +102,11 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list.metadata = {'url': '/providers/Microsoft.DataShare/operations'}
+ list.metadata = {'url': '/providers/Microsoft.DataShare/operations'} # type: ignore
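Editor's note (not part of the patch): with the rename to `Operations`, `list()` now returns an `ItemPaged` iterator instead of a single `OperationList`, so callers iterate rather than read `.value` directly. A one-line sketch, again assuming an already-constructed client:

    # Sketch only: list() now yields entries lazily via ItemPaged.
    for operation in client.operations.list():
        print(operation)  # each item is one entry from OperationList.value
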
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_provider_share_subscription_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_provider_share_subscriptions_operations.py
similarity index 56%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_provider_share_subscription_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_provider_share_subscriptions_operations.py
index 8c663fab40b..3937518e12a 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_provider_share_subscription_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_provider_share_subscriptions_operations.py
@@ -5,22 +5,28 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-class ProviderShareSubscriptionOperations(object):
- """ProviderShareSubscriptionOperations operations.
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ProviderShareSubscriptionsOperations(object):
+ """ProviderShareSubscriptionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -41,18 +47,19 @@ def __init__(self, client, config, serializer, deserializer):
self._deserialize = deserializer
self._config = config
- def get_by_share(
+ def adjust(
self,
resource_group_name, # type: str
account_name, # type: str
share_name, # type: str
provider_share_subscription_id, # type: str
+ provider_share_subscription, # type: "models.ProviderShareSubscription"
**kwargs # type: Any
):
# type: (...) -> "models.ProviderShareSubscription"
- """Get share subscription in a provider share.
+ """Adjust the expiration date of a share subscription in a provider share.
- Get share subscription in a provider share.
+ Adjust a share subscription's expiration date in a provider share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -62,17 +69,24 @@ def get_by_share(
:type share_name: str
:param provider_share_subscription_id: To locate shareSubscription.
:type provider_share_subscription_id: str
+ :param provider_share_subscription: The provider share subscription.
+ :type provider_share_subscription: ~data_share_management_client.models.ProviderShareSubscription
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ProviderShareSubscription or the result of cls(response)
+ :return: ProviderShareSubscription, or the result of cls(response)
:rtype: ~data_share_management_client.models.ProviderShareSubscription
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscription"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.get_by_share.metadata['url']
+ url = self.adjust.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -88,38 +102,42 @@ def get_by_share(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(provider_share_subscription, 'ProviderShareSubscription')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}'}
+ adjust.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/adjust'} # type: ignore
- def list_by_share(
+ def reinstate(
self,
resource_group_name, # type: str
account_name, # type: str
share_name, # type: str
- skip_token=None, # type: Optional[str]
+ provider_share_subscription_id, # type: str
+ provider_share_subscription, # type: "models.ProviderShareSubscription"
**kwargs # type: Any
):
- # type: (...) -> "models.ProviderShareSubscriptionList"
- """List share subscriptions in a provider share.
+ # type: (...) -> "models.ProviderShareSubscription"
+ """Reinstate share subscription in a provider share.
- List of available share subscriptions to a provider share.
+ Reinstate share subscription in a provider share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -127,69 +145,63 @@ def list_by_share(
:type account_name: str
:param share_name: The name of the share.
:type share_name: str
- :param skip_token: Continuation Token.
- :type skip_token: str
+ :param provider_share_subscription_id: To locate shareSubscription.
+ :type provider_share_subscription_id: str
+ :param provider_share_subscription: The provider share subscription.
+ :type provider_share_subscription: ~data_share_management_client.models.ProviderShareSubscription
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ProviderShareSubscriptionList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ProviderShareSubscriptionList
+ :return: ProviderShareSubscription, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.ProviderShareSubscription
:raises: ~azure.core.exceptions.HttpResponseError
"""
- cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscriptionList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_by_share.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- 'shareName': self._serialize.url("share_name", share_name, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscription"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ # Construct URL
+ url = self.reinstate.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ 'providerShareSubscriptionId': self._serialize.url("provider_share_subscription_id", provider_share_subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- return request
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- def extract_data(pipeline_response):
- deserialized = self._deserialize('ProviderShareSubscriptionList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- def get_next(next_link=None):
- request = prepare_request(next_link)
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(provider_share_subscription, 'ProviderShareSubscription')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.DataShareError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
- return pipeline_response
+ if cls:
+ return cls(pipeline_response, deserialized, {})
- return ItemPaged(
- get_next, extract_data
- )
- list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions'}
+ return deserialized
+ reinstate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/reinstate'} # type: ignore
def _revoke_initial(
self,
@@ -201,11 +213,15 @@ def _revoke_initial(
):
# type: (...) -> "models.ProviderShareSubscription"
cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscription"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._revoke_initial.metadata['url']
+ url = self._revoke_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -221,9 +237,8 @@ def _revoke_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -231,9 +246,8 @@ def _revoke_initial(
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
@@ -241,10 +255,10 @@ def _revoke_initial(
deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _revoke_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/revoke'}
+ _revoke_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/revoke'} # type: ignore
def begin_revoke(
self,
@@ -254,7 +268,7 @@ def begin_revoke(
provider_share_subscription_id, # type: str
**kwargs # type: Any
):
- # type: (...) -> "models.ProviderShareSubscription"
+ # type: (...) -> LROPoller["models.ProviderShareSubscription"]
"""Revoke share subscription in a provider share.
Revoke share subscription in a provider share.
@@ -268,24 +282,34 @@ def begin_revoke(
:param provider_share_subscription_id: To locate shareSubscription.
:type provider_share_subscription_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
- :return: An instance of LROPoller that returns ProviderShareSubscription
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either ProviderShareSubscription or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.ProviderShareSubscription]
-
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscription"]
- raw_result = self._revoke_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_name=share_name,
- provider_share_subscription_id=provider_share_subscription_id,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._revoke_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ provider_share_subscription_id=provider_share_subscription_id,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
@@ -294,17 +318,29 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ 'providerShareSubscriptionId': self._serialize.url("provider_share_subscription_id", provider_share_subscription_id, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_revoke.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/revoke'}
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_revoke.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/revoke'} # type: ignore
- def reinstate(
+ def get_by_share(
self,
resource_group_name, # type: str
account_name, # type: str
@@ -313,9 +349,9 @@ def reinstate(
**kwargs # type: Any
):
# type: (...) -> "models.ProviderShareSubscription"
- """Reinstate share subscription in a provider share.
+ """Get share subscription in a provider share.
- Reinstate share subscription in a provider share.
+ Get share subscription in a provider share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -326,16 +362,20 @@ def reinstate(
:param provider_share_subscription_id: To locate shareSubscription.
:type provider_share_subscription_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ProviderShareSubscription or the result of cls(response)
+ :return: ProviderShareSubscription, or the result of cls(response)
:rtype: ~data_share_management_client.models.ProviderShareSubscription
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscription"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.reinstate.metadata['url']
+ url = self.get_by_share.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -351,22 +391,108 @@ def reinstate(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
- request = self._client.post(url, query_parameters, header_parameters)
+ request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ProviderShareSubscription', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- reinstate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/reinstate'}
+ get_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}'} # type: ignore
+
+ def list_by_share(
+ self,
+ resource_group_name, # type: str
+ account_name, # type: str
+ share_name, # type: str
+ skip_token=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ProviderShareSubscriptionList"]
+ """List of available share subscriptions to a provider share.
+
+ List share subscriptions in a provider share.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param share_name: The name of the share.
+ :type share_name: str
+ :param skip_token: Continuation Token.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ProviderShareSubscriptionList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.ProviderShareSubscriptionList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ProviderShareSubscriptionList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_share.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ProviderShareSubscriptionList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions'} # type: ignore
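
The pager generated above wires prepare_request, extract_data and get_next into azure.core.paging.ItemPaged, so a caller simply iterates the returned value and next links are followed transparently. A minimal usage sketch under stated assumptions: `client` (an already constructed data share management client), the `provider_share_subscriptions` attribute name and the resource names below are illustrative and not part of this diff.

# Sketch only: `client` is assumed to be a constructed management client
# that exposes this operation group as `provider_share_subscriptions`.
for subscription in client.provider_share_subscriptions.list_by_share(
    resource_group_name="MyResourceGroup",
    account_name="MyAccount",
    share_name="MyShare",
):
    # Each element is a deserialized ProviderShareSubscription model.
    print(subscription.name)
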
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_share_subscription_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_share_subscriptions_operations.py
similarity index 59%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_share_subscription_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_share_subscriptions_operations.py
index 59e710c1c5b..4f722b1d779 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_share_subscription_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_share_subscriptions_operations.py
@@ -5,22 +5,28 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-class ShareSubscriptionOperations(object):
- """ShareSubscriptionOperations operations.
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ShareSubscriptionsOperations(object):
+ """ShareSubscriptionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -41,107 +47,26 @@ def __init__(self, client, config, serializer, deserializer):
self._deserialize = deserializer
self._config = config
- def get(
+ def _cancel_synchronization_initial(
self,
resource_group_name, # type: str
account_name, # type: str
share_subscription_name, # type: str
+ share_subscription_synchronization, # type: "models.ShareSubscriptionSynchronization"
**kwargs # type: Any
):
- # type: (...) -> "models.ShareSubscription"
- """Get a shareSubscription in an account.
-
- Get shareSubscription in an account.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param account_name: The name of the share account.
- :type account_name: str
- :param share_subscription_name: The name of the shareSubscription.
- :type share_subscription_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareSubscription or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareSubscription
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscription"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- # Construct URL
- url = self.get.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ # type: (...) -> "models.ShareSubscriptionSynchronization"
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
-
- deserialized = self._deserialize('ShareSubscription', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'}
-
- def create(
- self,
- resource_group_name, # type: str
- account_name, # type: str
- share_subscription_name, # type: str
- invitation_id, # type: str
- source_share_location, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ShareSubscription"
- """Create a shareSubscription in an account.
-
- Create shareSubscription in an account.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param account_name: The name of the share account.
- :type account_name: str
- :param share_subscription_name: The name of the shareSubscription.
- :type share_subscription_name: str
- :param invitation_id: The invitation id.
- :type invitation_id: str
- :param source_share_location: Source share location.
- :type source_share_location: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareSubscription or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareSubscription or ~data_share_management_client.models.ShareSubscription
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscription"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _share_subscription = models.ShareSubscription(invitation_id=invitation_id, source_share_location=source_share_location)
- api_version = "2019-11-01"
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self._cancel_synchronization_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -157,96 +82,44 @@ def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_share_subscription, 'ShareSubscription')
+ body_content = self._serialize.body(share_subscription_synchronization, 'ShareSubscriptionSynchronization')
body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
- if response.status_code not in [200, 201]:
+ if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
- deserialized = self._deserialize('ShareSubscription', pipeline_response)
-
- if response.status_code == 201:
- deserialized = self._deserialize('ShareSubscription', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'}
-
- def _delete_initial(
- self,
- resource_group_name, # type: str
- account_name, # type: str
- share_subscription_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.OperationResponse"
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- # Construct URL
- url = self._delete_initial.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('OperationResponse', pipeline_response)
+ if response.status_code == 202:
+ deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'}
+ _cancel_synchronization_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/cancelSynchronization'} # type: ignore
- def begin_delete(
+ def begin_cancel_synchronization(
self,
resource_group_name, # type: str
account_name, # type: str
share_subscription_name, # type: str
+ share_subscription_synchronization, # type: "models.ShareSubscriptionSynchronization"
**kwargs # type: Any
):
- # type: (...) -> "models.OperationResponse"
- """Delete a shareSubscription in an account.
+ # type: (...) -> LROPoller["models.ShareSubscriptionSynchronization"]
+ """Request cancellation of a data share snapshot.
- Delete shareSubscription in an account.
+ Request to cancel a synchronization.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -254,122 +127,67 @@ def begin_delete(
:type account_name: str
:param share_subscription_name: The name of the shareSubscription.
:type share_subscription_name: str
+ :param share_subscription_synchronization: Share Subscription Synchronization payload.
+ :type share_subscription_synchronization: ~data_share_management_client.models.ShareSubscriptionSynchronization
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
- :return: An instance of LROPoller that returns OperationResponse
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.OperationResponse]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either ShareSubscriptionSynchronization or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.ShareSubscriptionSynchronization]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- raw_result = self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_subscription_name=share_subscription_name,
- cls=lambda x,y,z: x,
- **kwargs
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._cancel_synchronization_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ share_subscription_synchronization=share_subscription_synchronization,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('OperationResponse', pipeline_response)
+ deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'}
-
- def list_by_account(
- self,
- resource_group_name, # type: str
- account_name, # type: str
- skip_token=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ShareSubscriptionList"
- """List share subscriptions in an account.
-
- List of available share subscriptions under an account.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param account_name: The name of the share account.
- :type account_name: str
- :param skip_token: Continuation Token.
- :type skip_token: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareSubscriptionList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareSubscriptionList
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_by_account.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('ShareSubscriptionList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions'}
-
- def list_source_share_synchronization_setting(
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_cancel_synchronization.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/cancelSynchronization'} # type: ignore
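
The regenerated operation returns an LROPoller and takes the ShareSubscriptionSynchronization model directly; the `synchronization_id` constructor argument in the sketch below is taken from the wrapper code removed above, while `client`, the `share_subscriptions` attribute name and the GUID are assumptions.

from azext_datashare.vendored_sdks.datashare import models

# Sketch only: cancel a running snapshot synchronization and wait for the LRO.
payload = models.ShareSubscriptionSynchronization(
    synchronization_id="7f0d4a47-d051-4f1a-9d7e-3a1f2b6c8d90",  # example GUID
)
poller = client.share_subscriptions.begin_cancel_synchronization(
    resource_group_name="MyResourceGroup",
    account_name="MyAccount",
    share_subscription_name="MyShareSubscription",
    share_subscription_synchronization=payload,
)
result = poller.result()  # blocks until the ARM long-running operation completes
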
+
+ def list_source_share_synchronization_settings(
self,
resource_group_name, # type: str
account_name, # type: str
@@ -377,10 +195,10 @@ def list_source_share_synchronization_setting(
skip_token=None, # type: Optional[str]
**kwargs # type: Any
):
- # type: (...) -> "models.SourceShareSynchronizationSettingList"
- """Get synchronization settings set on a share.
+ # type: (...) -> Iterable["models.SourceShareSynchronizationSettingList"]
+ """Get source share synchronization settings for a shareSubscription.
- Get source share synchronization settings for a shareSubscription.
+ Get synchronization settings set on a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -391,18 +209,26 @@ def list_source_share_synchronization_setting(
:param skip_token: Continuation token.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: SourceShareSynchronizationSettingList or the result of cls(response)
- :rtype: ~data_share_management_client.models.SourceShareSynchronizationSettingList
+ :return: An iterator like instance of either SourceShareSynchronizationSettingList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.SourceShareSynchronizationSettingList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SourceShareSynchronizationSettingList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_source_share_synchronization_setting.metadata['url']
+ url = self.list_source_share_synchronization_settings.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -410,21 +236,17 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.post(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
@@ -443,27 +265,30 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list_source_share_synchronization_setting.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSourceShareSynchronizationSettings'}
+ list_source_share_synchronization_settings.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSourceShareSynchronizationSettings'} # type: ignore
- def list_synchronization(
+ def list_synchronization_details(
self,
resource_group_name, # type: str
account_name, # type: str
share_subscription_name, # type: str
+ share_subscription_synchronization, # type: "models.ShareSubscriptionSynchronization"
skip_token=None, # type: Optional[str]
+ filter=None, # type: Optional[str]
+ orderby=None, # type: Optional[str]
**kwargs # type: Any
):
- # type: (...) -> "models.ShareSubscriptionSynchronizationList"
- """List synchronizations of a share subscription.
+ # type: (...) -> Iterable["models.SynchronizationDetailsList"]
+ """List data set level details for a share subscription synchronization.
- List Synchronizations in a share subscription.
+ List synchronization details.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -471,21 +296,37 @@ def list_synchronization(
:type account_name: str
:param share_subscription_name: The name of the share subscription.
:type share_subscription_name: str
+ :param share_subscription_synchronization: Share Subscription Synchronization payload.
+ :type share_subscription_synchronization: ~data_share_management_client.models.ShareSubscriptionSynchronization
:param skip_token: Continuation token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareSubscriptionSynchronizationList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareSubscriptionSynchronizationList
+ :return: An iterator like instance of either SynchronizationDetailsList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.SynchronizationDetailsList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronizationList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationDetailsList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ content_type = "application/json"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_synchronization.metadata['url']
+ url = self.list_synchronization_details.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -493,25 +334,31 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(share_subscription_synchronization, 'ShareSubscriptionSynchronization')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.post(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(share_subscription_synchronization, 'ShareSubscriptionSynchronization')
+ body_content_kwargs['content'] = body_content
+ request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
return request
def extract_data(pipeline_response):
- deserialized = self._deserialize('ShareSubscriptionSynchronizationList', pipeline_response)
+ deserialized = self._deserialize('SynchronizationDetailsList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
@@ -526,28 +373,29 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list_synchronization.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSynchronizations'}
+ list_synchronization_details.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSynchronizationDetails'} # type: ignore
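
Note the asymmetry in this pager: the first page is a POST of the ShareSubscriptionSynchronization body to listSynchronizationDetails, while later pages follow the service's next link with a GET carrying the same body; `filter` and `orderby` pass straight through as OData `$filter`/`$orderby` strings. A hedged sketch, with the same assumed `client` and `models` as above and a purely illustrative OData expression:

# Sketch only: list data-set level details for one synchronization run.
payload = models.ShareSubscriptionSynchronization(
    synchronization_id="7f0d4a47-d051-4f1a-9d7e-3a1f2b6c8d90",
)
details = client.share_subscriptions.list_synchronization_details(
    resource_group_name="MyResourceGroup",
    account_name="MyAccount",
    share_subscription_name="MyShareSubscription",
    share_subscription_synchronization=payload,
    orderby="properties/startTime desc",  # assumed OData expression
)
for detail in details:
    print(detail.name)  # SynchronizationDetails item; `name` assumed to be the data set name
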
- def list_synchronization_detail(
+ def list_synchronizations(
self,
resource_group_name, # type: str
account_name, # type: str
share_subscription_name, # type: str
- synchronization_id, # type: str
skip_token=None, # type: Optional[str]
+ filter=None, # type: Optional[str]
+ orderby=None, # type: Optional[str]
**kwargs # type: Any
):
- # type: (...) -> "models.SynchronizationDetailsList"
- """List synchronization details.
+ # type: (...) -> Iterable["models.ShareSubscriptionSynchronizationList"]
+ """List Synchronizations in a share subscription.
- List data set level details for a share subscription synchronization.
+ List synchronizations of a share subscription.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -555,25 +403,33 @@ def list_synchronization_detail(
:type account_name: str
:param share_subscription_name: The name of the share subscription.
:type share_subscription_name: str
- :param synchronization_id: Synchronization id.
- :type synchronization_id: str
:param skip_token: Continuation token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: SynchronizationDetailsList or the result of cls(response)
- :rtype: ~data_share_management_client.models.SynchronizationDetailsList
+ :return: An iterator like instance of either ShareSubscriptionSynchronizationList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.ShareSubscriptionSynchronizationList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
- cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationDetailsList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- _share_subscription_synchronization = models.ShareSubscriptionSynchronization(synchronization_id=synchronization_id)
- api_version = "2019-11-01"
- content_type = "application/json"
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronizationList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_synchronization_detail.metadata['url']
+ url = self.list_synchronizations.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -581,30 +437,25 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_share_subscription_synchronization, 'ShareSubscriptionSynchronization')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
- deserialized = self._deserialize('SynchronizationDetailsList', pipeline_response)
+ deserialized = self._deserialize('ShareSubscriptionSynchronizationList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
@@ -619,33 +470,35 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list_synchronization_detail.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSynchronizationDetails'}
+ list_synchronizations.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSynchronizations'} # type: ignore
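
`skip_token` only seeds the `$skipToken` query parameter on the first request; after that the service-supplied next link drives paging. When page-level control is wanted, the ItemPaged value can be walked with by_page(), as in this sketch (same assumptions as the earlier ones):

# Sketch only: iterate synchronizations page by page.
pages = client.share_subscriptions.list_synchronizations(
    resource_group_name="MyResourceGroup",
    account_name="MyAccount",
    share_subscription_name="MyShareSubscription",
).by_page()  # azure.core ItemPaged exposes by_page() for page-level iteration
for page in pages:
    for synchronization in page:
        print(synchronization.synchronization_id)
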
def _synchronize_initial(
self,
resource_group_name, # type: str
account_name, # type: str
share_subscription_name, # type: str
- synchronization_mode=None, # type: Optional[Union[str, "models.SynchronizationMode"]]
+ synchronize, # type: "models.Synchronize"
**kwargs # type: Any
):
# type: (...) -> "models.ShareSubscriptionSynchronization"
cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _synchronize = models.Synchronize(synchronization_mode=synchronization_mode)
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self._synchronize_initial.metadata['url']
+ url = self._synchronize_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -661,23 +514,20 @@ def _synchronize_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_synchronize, 'Synchronize')
+ body_content = self._serialize.body(synchronize, 'Synchronize')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
@@ -685,23 +535,23 @@ def _synchronize_initial(
deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _synchronize_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/Synchronize'}
+ _synchronize_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/synchronize'} # type: ignore
def begin_synchronize(
self,
resource_group_name, # type: str
account_name, # type: str
share_subscription_name, # type: str
- synchronization_mode=None, # type: Optional[Union[str, "models.SynchronizationMode"]]
+ synchronize, # type: "models.Synchronize"
**kwargs # type: Any
):
- # type: (...) -> "models.ShareSubscriptionSynchronization"
- """Initiate a copy.
+ # type: (...) -> LROPoller["models.ShareSubscriptionSynchronization"]
+ """Initiate an asynchronous data share job.
- Initiate an asynchronous data share job.
+ Initiate a copy.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -709,28 +559,37 @@ def begin_synchronize(
:type account_name: str
:param share_subscription_name: The name of share subscription.
:type share_subscription_name: str
- :param synchronization_mode: Mode of synchronization used in triggers and snapshot sync.
- Incremental by default.
- :type synchronization_mode: str or ~data_share_management_client.models.SynchronizationMode
+ :param synchronize: Synchronize payload.
+ :type synchronize: ~data_share_management_client.models.Synchronize
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
- :return: An instance of LROPoller that returns ShareSubscriptionSynchronization
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either ShareSubscriptionSynchronization or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.ShareSubscriptionSynchronization]
-
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
- raw_result = self._synchronize_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_subscription_name=share_subscription_name,
- synchronization_mode=synchronization_mode,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._synchronize_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ synchronize=synchronize,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
@@ -739,34 +598,130 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_synchronize.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/Synchronize'}
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_synchronize.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/synchronize'} # type: ignore
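
begin_synchronize now takes the Synchronize model rather than the flattened `synchronization_mode` keyword; the removed docstring notes the mode defaults to Incremental. A hedged sketch of kicking off a snapshot copy, again assuming `client` and `models` as above:

# Sketch only: trigger a snapshot synchronization and wait for completion.
body = models.Synchronize(synchronization_mode="Incremental")  # mode value per the removed docstring
poller = client.share_subscriptions.begin_synchronize(
    resource_group_name="MyResourceGroup",
    account_name="MyAccount",
    share_subscription_name="MyShareSubscription",
    synchronize=body,
)
synchronization = poller.result()
print(synchronization.synchronization_id)
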
- def _cancel_synchronization_initial(
+ def get(
self,
resource_group_name, # type: str
account_name, # type: str
share_subscription_name, # type: str
- synchronization_id, # type: str
**kwargs # type: Any
):
- # type: (...) -> "models.ShareSubscriptionSynchronization"
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
+ # type: (...) -> "models.ShareSubscription"
+ """Get shareSubscription in an account.
+
+ Get a shareSubscription in an account.
- _share_subscription_synchronization = models.ShareSubscriptionSynchronization(synchronization_id=synchronization_id)
- api_version = "2019-11-01"
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param share_subscription_name: The name of the shareSubscription.
+ :type share_subscription_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ShareSubscription, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.ShareSubscription
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscription"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.DataShareError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ShareSubscription', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'} # type: ignore
+
+ def create(
+ self,
+ resource_group_name, # type: str
+ account_name, # type: str
+ share_subscription_name, # type: str
+ share_subscription, # type: "models.ShareSubscription"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ShareSubscription"
+ """Create shareSubscription in an account.
+
+ Create a shareSubscription in an account.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param share_subscription_name: The name of the shareSubscription.
+ :type share_subscription_name: str
+ :param share_subscription: create parameters for shareSubscription.
+ :type share_subscription: ~data_share_management_client.models.ShareSubscription
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ShareSubscription, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.ShareSubscription
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscription"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self._cancel_synchronization_initial.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -782,47 +737,96 @@ def _cancel_synchronization_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_share_subscription_synchronization, 'ShareSubscriptionSynchronization')
+ body_content = self._serialize.body(share_subscription, 'ShareSubscription')
body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.DataShareError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('ShareSubscription', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('ShareSubscription', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'} # type: ignore
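
create likewise takes the full ShareSubscription model now; the `invitation_id` and `source_share_location` constructor arguments below mirror the flattened parameters removed earlier in this file. Illustrative sketch only, with the usual assumed `client`, `models` and placeholder values:

# Sketch only: accept an invitation by creating a share subscription.
subscription = models.ShareSubscription(
    invitation_id="dfbbc284-19b3-4a98-8f47-5fd8a6a7dbcd",  # example GUID
    source_share_location="eastus2",
)
created = client.share_subscriptions.create(
    resource_group_name="MyResourceGroup",
    account_name="MyAccount",
    share_subscription_name="MyShareSubscription",
    share_subscription=subscription,
)
print(created.name)
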
+
+ def _delete_initial(
+ self,
+ resource_group_name, # type: str
+ account_name, # type: str
+ share_subscription_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Optional["models.OperationResponse"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OperationResponse"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
- if response.status_code not in [200, 202]:
+ if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
- deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
-
- if response.status_code == 202:
- deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
+ deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _cancel_synchronization_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/cancelSynchronization'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'} # type: ignore
- def begin_cancel_synchronization(
+ def begin_delete(
self,
resource_group_name, # type: str
account_name, # type: str
share_subscription_name, # type: str
- synchronization_id, # type: str
**kwargs # type: Any
):
- # type: (...) -> "models.ShareSubscriptionSynchronization"
- """Request to cancel a synchronization.
+ # type: (...) -> LROPoller["models.OperationResponse"]
+ """Delete shareSubscription in an account.
- Request cancellation of a data share snapshot.
+ Delete a shareSubscription in an account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -830,41 +834,152 @@ def begin_cancel_synchronization(
:type account_name: str
:param share_subscription_name: The name of the shareSubscription.
:type share_subscription_name: str
- :param synchronization_id: Synchronization id.
- :type synchronization_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
- :return: An instance of LROPoller that returns ShareSubscriptionSynchronization
- :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.ShareSubscriptionSynchronization]
-
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either OperationResponse or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.OperationResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionSynchronization"]
- raw_result = self._cancel_synchronization_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_subscription_name=share_subscription_name,
- synchronization_id=synchronization_id,
- cls=lambda x,y,z: x,
- **kwargs
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('ShareSubscriptionSynchronization', pipeline_response)
+ deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_cancel_synchronization.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/cancelSynchronization'}
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}'} # type: ignore
+
+ def list_by_account(
+ self,
+ resource_group_name, # type: str
+ account_name, # type: str
+ skip_token=None, # type: Optional[str]
+ filter=None, # type: Optional[str]
+ orderby=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ShareSubscriptionList"]
+ """List of available share subscriptions under an account.
+
+ List share subscriptions in an account.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param skip_token: Continuation Token.
+ :type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ShareSubscriptionList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.ShareSubscriptionList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSubscriptionList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_account.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ShareSubscriptionList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions'} # type: ignore
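
The regenerated share-subscription operations above change two calling conventions: begin_delete now returns an azure.core.polling.LROPoller (ARM polling is the default), and list_by_account returns an azure.core.paging.ItemPaged iterator with optional $skipToken/$filter/$orderby query parameters. The following is a minimal usage sketch, not part of this diff; the client import path and the share_subscriptions attribute name are assumptions made for illustration (the extension itself obtains the client through its generated _client_factory).

from azure.identity import DefaultAzureCredential

# Assumed import path for the vendored, regenerated client.
from azext_datashare.vendored_sdks.datashare import DataShareManagementClient

credential = DefaultAzureCredential()
client = DataShareManagementClient(credential, subscription_id="<subscription-id>")

# begin_delete returns an LROPoller; result() blocks until the long-running
# delete finishes and yields the deserialized OperationResponse.
poller = client.share_subscriptions.begin_delete(
    resource_group_name="my-rg",
    account_name="my-account",
    share_subscription_name="my-share-subscription",
)
operation_response = poller.result()

# list_by_account pages transparently via ItemPaged; skip_token, filter and
# orderby are the optional OData parameters added in this generation.
for share_subscription in client.share_subscriptions.list_by_account(
    resource_group_name="my-rg",
    account_name="my-account",
):
    print(share_subscription.name)
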
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_share_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_shares_operations.py
similarity index 59%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_share_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_shares_operations.py
index a891d441424..cc91679dde1 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_share_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_shares_operations.py
@@ -5,23 +5,28 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-import datetime
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-class ShareOperations(object):
- """ShareOperations operations.
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class SharesOperations(object):
+ """SharesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -42,6 +47,211 @@ def __init__(self, client, config, serializer, deserializer):
self._deserialize = deserializer
self._config = config
+ def list_synchronization_details(
+ self,
+ resource_group_name, # type: str
+ account_name, # type: str
+ share_name, # type: str
+ share_synchronization, # type: "models.ShareSynchronization"
+ skip_token=None, # type: Optional[str]
+ filter=None, # type: Optional[str]
+ orderby=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.SynchronizationDetailsList"]
+ """List data set level details for a share synchronization.
+
+ List synchronization details.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param share_name: The name of the share.
+ :type share_name: str
+ :param share_synchronization: Share Synchronization payload.
+ :type share_synchronization: ~data_share_management_client.models.ShareSynchronization
+ :param skip_token: Continuation token.
+ :type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either SynchronizationDetailsList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.SynchronizationDetailsList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationDetailsList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ content_type = "application/json"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_synchronization_details.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(share_synchronization, 'ShareSynchronization')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(share_synchronization, 'ShareSynchronization')
+ body_content_kwargs['content'] = body_content
+ request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('SynchronizationDetailsList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_synchronization_details.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/listSynchronizationDetails'} # type: ignore
+
+ def list_synchronizations(
+ self,
+ resource_group_name, # type: str
+ account_name, # type: str
+ share_name, # type: str
+ skip_token=None, # type: Optional[str]
+ filter=None, # type: Optional[str]
+ orderby=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ShareSynchronizationList"]
+ """List Synchronizations in a share.
+
+ List synchronizations of a share.
+
+ :param resource_group_name: The resource group name.
+ :type resource_group_name: str
+ :param account_name: The name of the share account.
+ :type account_name: str
+ :param share_name: The name of the share.
+ :type share_name: str
+ :param skip_token: Continuation token.
+ :type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ShareSynchronizationList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.ShareSynchronizationList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSynchronizationList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_synchronizations.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ShareSynchronizationList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.DataShareError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_synchronizations.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/listSynchronizations'} # type: ignore
+
def get(
self,
resource_group_name, # type: str
@@ -50,9 +260,9 @@ def get(
**kwargs # type: Any
):
# type: (...) -> "models.Share"
- """Get a share.
+ """Get a specified share.
- Get a specified share.
+ Get a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -61,16 +271,20 @@ def get(
:param share_name: The name of the share to retrieve.
:type share_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Share or the result of cls(response)
+ :return: Share, or the result of cls(response)
:rtype: ~data_share_management_client.models.Share
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Share"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -85,9 +299,8 @@ def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -95,30 +308,28 @@ def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Share', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'} # type: ignore
def create(
self,
resource_group_name, # type: str
account_name, # type: str
share_name, # type: str
- description=None, # type: Optional[str]
- share_kind=None, # type: Optional[Union[str, "models.ShareKind"]]
- terms=None, # type: Optional[str]
+ share, # type: "models.Share"
**kwargs # type: Any
):
# type: (...) -> "models.Share"
- """Create a share.
+ """Create a share in the given account.
- Create a share in the given account.
+ Create a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -126,26 +337,24 @@ def create(
:type account_name: str
:param share_name: The name of the share.
:type share_name: str
- :param description: Share description.
- :type description: str
- :param share_kind: Share kind.
- :type share_kind: str or ~data_share_management_client.models.ShareKind
- :param terms: Share terms.
- :type terms: str
+ :param share: The share payload.
+ :type share: ~data_share_management_client.models.Share
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Share or the result of cls(response)
- :rtype: ~data_share_management_client.models.Share or ~data_share_management_client.models.Share
+ :return: Share, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.Share
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Share"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
-
- _share = models.Share(description=description, share_kind=share_kind, terms=terms)
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -161,23 +370,20 @@ def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_share, 'Share')
+ body_content = self._serialize.body(share, 'Share')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Share', pipeline_response)
@@ -185,10 +391,10 @@ def create(
deserialized = self._deserialize('Share', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'}
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'} # type: ignore
def _delete_initial(
self,
@@ -197,13 +403,17 @@ def _delete_initial(
share_name, # type: str
**kwargs # type: Any
):
- # type: (...) -> "models.OperationResponse"
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ # type: (...) -> Optional["models.OperationResponse"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OperationResponse"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._delete_initial.metadata['url']
+ url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -218,9 +428,8 @@ def _delete_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -228,17 +437,17 @@ def _delete_initial(
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'} # type: ignore
def begin_delete(
self,
@@ -247,10 +456,10 @@ def begin_delete(
share_name, # type: str
**kwargs # type: Any
):
- # type: (...) -> "models.OperationResponse"
- """Delete a share.
+ # type: (...) -> LROPoller["models.OperationResponse"]
+ """Deletes a share.
- Deletes a share.
+ Delete a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -259,23 +468,33 @@ def begin_delete(
:param share_name: The name of the share.
:type share_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
- :return: An instance of LROPoller that returns OperationResponse
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either OperationResponse or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.OperationResponse]
-
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- raw_result = self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_name=share_name,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('OperationResponse', pipeline_response)
@@ -284,27 +503,40 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'}
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}'} # type: ignore
def list_by_account(
self,
resource_group_name, # type: str
account_name, # type: str
skip_token=None, # type: Optional[str]
+ filter=None, # type: Optional[str]
+ orderby=None, # type: Optional[str]
**kwargs # type: Any
):
- # type: (...) -> "models.ShareList"
- """List shares in an account.
+ # type: (...) -> Iterable["models.ShareList"]
+ """List of available shares under an account.
- List of available shares under an account.
+ List shares in an account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -312,244 +544,56 @@ def list_by_account(
:type account_name: str
:param skip_token: Continuation Token.
:type skip_token: str
+ :param filter: Filters the results using OData syntax.
+ :type filter: str
+ :param orderby: Sorts the results using OData syntax.
+ :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareList
+ :return: An iterator like instance of either ShareList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.ShareList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ShareList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_by_account.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('ShareList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares'}
-
- def list_synchronization(
- self,
- resource_group_name, # type: str
- account_name, # type: str
- share_name, # type: str
- skip_token=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ShareSynchronizationList"
- """List synchronizations of a share.
-
- List Synchronizations in a share.
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param account_name: The name of the share account.
- :type account_name: str
- :param share_name: The name of the share.
- :type share_name: str
- :param skip_token: Continuation token.
- :type skip_token: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ShareSynchronizationList or the result of cls(response)
- :rtype: ~data_share_management_client.models.ShareSynchronizationList
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ShareSynchronizationList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
-
- def prepare_request(next_link=None):
if not next_link:
# Construct URL
- url = self.list_synchronization.metadata['url']
+ url = self.list_by_account.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'accountName': self._serialize.url("account_name", account_name, 'str'),
- 'shareName': self._serialize.url("share_name", share_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ if filter is not None:
+ query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
+ if orderby is not None:
+ query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.post(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
- deserialized = self._deserialize('ShareSynchronizationList', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- error = self._deserialize(models.DataShareError, response)
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_synchronization.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/listSynchronizations'}
-
- def list_synchronization_detail(
- self,
- resource_group_name, # type: str
- account_name, # type: str
- share_name, # type: str
- skip_token=None, # type: Optional[str]
- consumer_email=None, # type: Optional[str]
- consumer_name=None, # type: Optional[str]
- consumer_tenant_name=None, # type: Optional[str]
- duration_ms=None, # type: Optional[int]
- end_time=None, # type: Optional[datetime.datetime]
- message=None, # type: Optional[str]
- start_time=None, # type: Optional[datetime.datetime]
- status=None, # type: Optional[str]
- synchronization_id=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.SynchronizationDetailsList"
- """List synchronization details.
-
- List data set level details for a share synchronization.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param account_name: The name of the share account.
- :type account_name: str
- :param share_name: The name of the share.
- :type share_name: str
- :param skip_token: Continuation token.
- :type skip_token: str
- :param consumer_email: Email of the user who created the synchronization.
- :type consumer_email: str
- :param consumer_name: Name of the user who created the synchronization.
- :type consumer_name: str
- :param consumer_tenant_name: Tenant name of the consumer who created the synchronization.
- :type consumer_tenant_name: str
- :param duration_ms: synchronization duration.
- :type duration_ms: int
- :param end_time: End time of synchronization.
- :type end_time: ~datetime.datetime
- :param message: message of synchronization.
- :type message: str
- :param start_time: start time of synchronization.
- :type start_time: ~datetime.datetime
- :param status: Raw Status.
- :type status: str
- :param synchronization_id: Synchronization id.
- :type synchronization_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SynchronizationDetailsList or the result of cls(response)
- :rtype: ~data_share_management_client.models.SynchronizationDetailsList
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationDetailsList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- _share_synchronization = models.ShareSynchronization(consumer_email=consumer_email, consumer_name=consumer_name, consumer_tenant_name=consumer_tenant_name, duration_ms=duration_ms, end_time=end_time, message=message, start_time=start_time, status=status, synchronization_id=synchronization_id)
- api_version = "2019-11-01"
- content_type = "application/json"
-
- def prepare_request(next_link=None):
- if not next_link:
- # Construct URL
- url = self.list_synchronization_detail.metadata['url']
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
- 'accountName': self._serialize.url("account_name", account_name, 'str'),
- 'shareName': self._serialize.url("share_name", share_name, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- else:
- url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(_share_synchronization, 'ShareSynchronization')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('SynchronizationDetailsList', pipeline_response)
+ deserialized = self._deserialize('ShareList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
@@ -564,11 +608,11 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list_synchronization_detail.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/listSynchronizationDetails'}
+ list_by_account.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares'} # type: ignore
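
The renamed SharesOperations group keeps both synchronization listings but changes their shapes: list_synchronizations is now a pager with OData options, and list_synchronization_details takes a ShareSynchronization model instead of the flattened keyword arguments removed above. A sketch under the same assumptions as the earlier example (reusing the hypothetical client instance; the shares attribute name is not confirmed by this diff):

# Iterate the synchronizations of a share; $skipToken/$filter/$orderby are optional.
for share_sync in client.shares.list_synchronizations(
    resource_group_name="my-rg",
    account_name="my-account",
    share_name="my-share",
):
    # Drill into data-set level details; the whole ShareSynchronization model
    # returned by the pager is passed back as the request payload.
    for detail in client.shares.list_synchronization_details(
        resource_group_name="my-rg",
        account_name="my-account",
        share_name="my-share",
        share_synchronization=share_sync,
    ):
        print(detail)
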
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_synchronization_setting_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_synchronization_settings_operations.py
similarity index 69%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_synchronization_setting_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_synchronization_settings_operations.py
index 4b59370d082..3dbcdb4481a 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_synchronization_setting_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_synchronization_settings_operations.py
@@ -5,22 +5,28 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-class SynchronizationSettingOperations(object):
- """SynchronizationSettingOperations operations.
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class SynchronizationSettingsOperations(object):
+ """SynchronizationSettingsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -50,9 +56,9 @@ def get(
**kwargs # type: Any
):
# type: (...) -> "models.SynchronizationSetting"
- """Get a synchronizationSetting in a share.
+ """Get synchronizationSetting in a share.
- Get synchronizationSetting in a share.
+ Get a synchronizationSetting in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -63,16 +69,20 @@ def get(
:param synchronization_setting_name: The name of the synchronizationSetting.
:type synchronization_setting_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: SynchronizationSetting or the result of cls(response)
+ :return: SynchronizationSetting, or the result of cls(response)
:rtype: ~data_share_management_client.models.SynchronizationSetting
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationSetting"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -88,9 +98,8 @@ def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -98,15 +107,15 @@ def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('SynchronizationSetting', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'} # type: ignore
def create(
self,
@@ -118,9 +127,9 @@ def create(
**kwargs # type: Any
):
# type: (...) -> "models.SynchronizationSetting"
- """Create or update a synchronizationSetting.
+ """Adds a new synchronization setting to an existing share.
- Adds a new synchronization setting to an existing share.
+ Create a synchronizationSetting.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -133,17 +142,21 @@ def create(
:param synchronization_setting: The new synchronization setting information.
:type synchronization_setting: ~data_share_management_client.models.SynchronizationSetting
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: SynchronizationSetting or the result of cls(response)
- :rtype: ~data_share_management_client.models.SynchronizationSetting or ~data_share_management_client.models.SynchronizationSetting
+ :return: SynchronizationSetting, or the result of cls(response)
+ :rtype: ~data_share_management_client.models.SynchronizationSetting
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationSetting"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self.create.metadata['url']
+ url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -160,23 +173,20 @@ def create(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(synchronization_setting, 'SynchronizationSetting')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SynchronizationSetting', pipeline_response)
@@ -184,10 +194,10 @@ def create(
deserialized = self._deserialize('SynchronizationSetting', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'}
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'} # type: ignore
def _delete_initial(
self,
@@ -197,13 +207,17 @@ def _delete_initial(
synchronization_setting_name, # type: str
**kwargs # type: Any
):
- # type: (...) -> "models.OperationResponse"
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ # type: (...) -> Optional["models.OperationResponse"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OperationResponse"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._delete_initial.metadata['url']
+ url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -219,9 +233,8 @@ def _delete_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -229,17 +242,17 @@ def _delete_initial(
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'} # type: ignore
def begin_delete(
self,
@@ -249,10 +262,10 @@ def begin_delete(
synchronization_setting_name, # type: str
**kwargs # type: Any
):
- # type: (...) -> "models.OperationResponse"
- """Delete a synchronizationSetting in a share.
+ # type: (...) -> LROPoller["models.OperationResponse"]
+ """Delete synchronizationSetting in a share.
- Delete synchronizationSetting in a share.
+ Delete a synchronizationSetting in a share.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -263,24 +276,34 @@ def begin_delete(
:param synchronization_setting_name: The name of the synchronizationSetting .
:type synchronization_setting_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
- :return: An instance of LROPoller that returns OperationResponse
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either OperationResponse or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.OperationResponse]
-
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- raw_result = self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_name=share_name,
- synchronization_setting_name=synchronization_setting_name,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_name=share_name,
+ synchronization_setting_name=synchronization_setting_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('OperationResponse', pipeline_response)
@@ -289,15 +312,27 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareName': self._serialize.url("share_name", share_name, 'str'),
+ 'synchronizationSettingName': self._serialize.url("synchronization_setting_name", synchronization_setting_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'}
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}'} # type: ignore
def list_by_share(
self,
@@ -307,7 +342,7 @@ def list_by_share(
skip_token=None, # type: Optional[str]
**kwargs # type: Any
):
- # type: (...) -> "models.SynchronizationSettingList"
+ # type: (...) -> Iterable["models.SynchronizationSettingList"]
"""List synchronizationSettings in a share.
List synchronizationSettings in a share.
@@ -321,18 +356,26 @@ def list_by_share(
:param skip_token: continuation token.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: SynchronizationSettingList or the result of cls(response)
- :rtype: ~data_share_management_client.models.SynchronizationSettingList
+ :return: An iterator like instance of either SynchronizationSettingList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.SynchronizationSettingList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SynchronizationSettingList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share.metadata['url']
+ url = self.list_by_share.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -340,21 +383,17 @@ def prepare_request(next_link=None):
'shareName': self._serialize.url("share_name", share_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
@@ -373,11 +412,11 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings'}
+ list_by_share.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings'} # type: ignore
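Editorial note, not part of the diff: after this regeneration, `list_by_share` returns an `azure.core.paging.ItemPaged` iterator and failures are raised as `HttpResponseError` with `error_format=ARMErrorFormat`. A minimal consumption sketch follows; the `client` object and its `synchronization_settings` attribute name are assumptions about how the vendored `DataShareManagementClient` exposes this operation group, not something this hunk shows.

# Editorial sketch under the assumptions above; not generated code.
from azure.core.exceptions import HttpResponseError

def print_synchronization_settings(client, resource_group_name, account_name, share_name):
    try:
        # list_by_share now yields SynchronizationSetting items page by page,
        # so callers no longer handle SynchronizationSettingList or $skipToken themselves.
        for setting in client.synchronization_settings.list_by_share(
            resource_group_name=resource_group_name,
            account_name=account_name,
            share_name=share_name,
        ):
            print(setting.name)
    except HttpResponseError as error:
        # ARMErrorFormat surfaces the ARM error code and message on the exception.
        print("request failed:", error.message)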
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_trigger_operations.py b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_triggers_operations.py
similarity index 65%
rename from src/datashare/azext_datashare/vendored_sdks/datashare/operations/_trigger_operations.py
rename to src/datashare/azext_datashare/vendored_sdks/datashare/operations/_triggers_operations.py
index 1bea3c38c31..dddf2b2da98 100644
--- a/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_trigger_operations.py
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/operations/_triggers_operations.py
@@ -5,22 +5,28 @@
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+from typing import TYPE_CHECKING
import warnings
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-class TriggerOperations(object):
- """TriggerOperations operations.
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class TriggersOperations(object):
+ """TriggersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
@@ -50,9 +56,9 @@ def get(
**kwargs # type: Any
):
# type: (...) -> "models.Trigger"
- """Get a Trigger in a shareSubscription.
+ """Get Trigger in a shareSubscription.
- Get Trigger in a shareSubscription.
+ Get a Trigger in a shareSubscription.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -63,16 +69,20 @@ def get(
:param trigger_name: The name of the trigger.
:type trigger_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: Trigger or the result of cls(response)
+ :return: Trigger, or the result of cls(response)
:rtype: ~data_share_management_client.models.Trigger
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.Trigger"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self.get.metadata['url']
+ url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -88,9 +98,8 @@ def get(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -98,15 +107,15 @@ def get(
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Trigger', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'}
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'} # type: ignore
def _create_initial(
self,
@@ -119,12 +128,16 @@ def _create_initial(
):
# type: (...) -> "models.Trigger"
cls = kwargs.pop('cls', None) # type: ClsType["models.Trigger"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
# Construct URL
- url = self._create_initial.metadata['url']
+ url = self._create_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -141,23 +154,20 @@ def _create_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(trigger, 'Trigger')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
- deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Trigger', pipeline_response)
@@ -165,10 +175,10 @@ def _create_initial(
deserialized = self._deserialize('Trigger', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'}
+ _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'} # type: ignore
def begin_create(
self,
@@ -179,42 +189,52 @@ def begin_create(
trigger, # type: "models.Trigger"
**kwargs # type: Any
):
- # type: (...) -> "models.Trigger"
- """Create a Trigger.
+ # type: (...) -> LROPoller["models.Trigger"]
+ """This method creates a trigger for a share subscription.
- This method creates a trigger for a share subscription.
+ Create a Trigger.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param account_name: The name of the share account.
:type account_name: str
:param share_subscription_name: The name of the share subscription which will hold the data set
- sink.
+ sink.
:type share_subscription_name: str
:param trigger_name: The name of the trigger.
:type trigger_name: str
:param trigger: Trigger details.
:type trigger: ~data_share_management_client.models.Trigger
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
- :return: An instance of LROPoller that returns Trigger
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either Trigger or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.Trigger]
-
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.Trigger"]
- raw_result = self._create_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_subscription_name=share_subscription_name,
- trigger_name=trigger_name,
- trigger=trigger,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._create_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ trigger_name=trigger_name,
+ trigger=trigger,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('Trigger', pipeline_response)
@@ -223,15 +243,27 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'}
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'} # type: ignore
def _delete_initial(
self,
@@ -241,13 +273,17 @@ def _delete_initial(
trigger_name, # type: str
**kwargs # type: Any
):
- # type: (...) -> "models.OperationResponse"
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ # type: (...) -> Optional["models.OperationResponse"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OperationResponse"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
# Construct URL
- url = self._delete_initial.metadata['url']
+ url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -263,9 +299,8 @@ def _delete_initial(
# Construct headers
header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
- # Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
@@ -273,17 +308,17 @@ def _delete_initial(
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.DataShareError, response)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationResponse', pipeline_response)
if cls:
- return cls(pipeline_response, deserialized, {})
+ return cls(pipeline_response, deserialized, {})
return deserialized
- _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'}
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'} # type: ignore
def begin_delete(
self,
@@ -293,10 +328,10 @@ def begin_delete(
trigger_name, # type: str
**kwargs # type: Any
):
- # type: (...) -> "models.OperationResponse"
- """Delete a Trigger in a shareSubscription.
+ # type: (...) -> LROPoller["models.OperationResponse"]
+ """Delete Trigger in a shareSubscription.
- Delete Trigger in a shareSubscription.
+ Delete a Trigger in a shareSubscription.
:param resource_group_name: The resource group name.
:type resource_group_name: str
@@ -307,24 +342,34 @@ def begin_delete(
:param trigger_name: The name of the trigger.
:type trigger_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
- :return: An instance of LROPoller that returns OperationResponse
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either OperationResponse or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~data_share_management_client.models.OperationResponse]
-
:raises ~azure.core.exceptions.HttpResponseError:
"""
- polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod]
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResponse"]
- raw_result = self._delete_initial(
- resource_group_name=resource_group_name,
- account_name=account_name,
- share_subscription_name=share_subscription_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
)
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ account_name=account_name,
+ share_subscription_name=share_subscription_name,
+ trigger_name=trigger_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('OperationResponse', pipeline_response)
@@ -333,15 +378,27 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, deserialized, {})
return deserialized
- lro_delay = kwargs.get(
- 'polling_interval',
- self._config.polling_interval
- )
- if polling is True: raise ValueError("polling being True is not valid because no default polling implemetation has been defined.")
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'accountName': self._serialize.url("account_name", account_name, 'str'),
+ 'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
+ 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'}
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}'} # type: ignore
def list_by_share_subscription(
self,
@@ -351,7 +408,7 @@ def list_by_share_subscription(
skip_token=None, # type: Optional[str]
**kwargs # type: Any
):
- # type: (...) -> "models.TriggerList"
+ # type: (...) -> Iterable["models.TriggerList"]
"""List Triggers in a share subscription.
List Triggers in a share subscription.
@@ -365,18 +422,26 @@ def list_by_share_subscription(
:param skip_token: Continuation token.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerList or the result of cls(response)
- :rtype: ~data_share_management_client.models.TriggerList
+ :return: An iterator like instance of either TriggerList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~data_share_management_client.models.TriggerList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerList"]
- error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
- api_version = "2019-11-01"
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-09-01"
+ accept = "application/json"
def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
if not next_link:
# Construct URL
- url = self.list_by_share_subscription.metadata['url']
+ url = self.list_by_share_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -384,21 +449,17 @@ def prepare_request(next_link=None):
'shareSubscriptionName': self._serialize.url("share_subscription_name", share_subscription_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if skip_token is not None:
- query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- # Construct and send request
- request = self._client.get(url, query_parameters, header_parameters)
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
@@ -417,11 +478,11 @@ def get_next(next_link=None):
if response.status_code not in [200]:
error = self._deserialize(models.DataShareError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, model=error)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
- list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers'}
+ list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers'} # type: ignore
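Editorial note, not part of the diff: the regenerated `begin_create`/`begin_delete` methods now default to `ARMPolling` and accept a `continuation_token` keyword, so a long-running operation can be resumed from saved poller state. A rough sketch, again assuming the operation group is exposed as `client.triggers` on the vendored client:

# Editorial sketch; `client.triggers` is an assumed attribute name.
def delete_trigger_resumably(client, resource_group_name, account_name,
                             share_subscription_name, trigger_name):
    poller = client.triggers.begin_delete(
        resource_group_name=resource_group_name,
        account_name=account_name,
        share_subscription_name=share_subscription_name,
        trigger_name=trigger_name,
    )
    # continuation_token() is standard azure-core LROPoller behaviour; persist it
    # if the process might exit before the operation completes.
    token = poller.continuation_token()

    # Later (possibly in another process), resume polling from the saved state
    # instead of issuing a second DELETE request.
    resumed = client.triggers.begin_delete(
        resource_group_name=resource_group_name,
        account_name=account_name,
        share_subscription_name=share_subscription_name,
        trigger_name=trigger_name,
        continuation_token=token,
    )
    return resumed.result()  # OperationResponse once the deletion finishes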
diff --git a/src/datashare/azext_datashare/vendored_sdks/datashare/py.typed b/src/datashare/azext_datashare/vendored_sdks/datashare/py.typed
new file mode 100644
index 00000000000..e5aff4f83af
--- /dev/null
+++ b/src/datashare/azext_datashare/vendored_sdks/datashare/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
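Editorial note, not part of the diff: the new `py.typed` marker makes the vendored package advertise its inline `# type:` comments per PEP 561, so a type checker pointed at the extension will actually consume them. One assumption-laden way to exercise that from Python (mypy is not added by this change and must be installed separately):

# Illustration only: runs the standard mypy CLI over the package path shown in this diff.
import subprocess

subprocess.run(
    ["mypy", "src/datashare/azext_datashare/vendored_sdks/datashare"],
    check=False,  # report type errors without aborting the calling script
)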
diff --git a/src/datashare/report.md b/src/datashare/report.md
index 652f8051a95..826fbb5734b 100644
--- a/src/datashare/report.md
+++ b/src/datashare/report.md
@@ -1,483 +1,979 @@
-# Azure CLI Module Creation Report
-
-### datashare account create
-
-create a datashare account.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--identity**|object|Identity of resource|identity|identity|
-|**--location**|string|Location of the azure resource.|location|location|
-|**--tags**|dictionary|Tags on the azure resource.|tags|tags|
-### datashare account delete
-
-delete a datashare account.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-### datashare account list
-
-list a datashare account.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare account show
-
-show a datashare account.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-### datashare account update
-
-update a datashare account.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--tags**|dictionary|Tags on the azure resource.|tags|tags|
-### datashare consumer-invitation list
-
-list a datashare consumer-invitation.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare consumer-invitation reject-invitation
-
-reject-invitation a datashare consumer-invitation.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--location**|string|Location of the invitation|location|location|
-|**--invitation_id**|string|Unique id of the invitation.|invitation_id|properties_invitation_id|
-### datashare consumer-invitation show
-
-show a datashare consumer-invitation.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--location**|string|Location of the invitation|location|location|
-|**--invitation_id**|string|An invitation id|invitation_id|invitation_id|
-### datashare consumer-source-data-set list
-
-list a datashare consumer-source-data-set.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare data-set create
-
-create a datashare data-set.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--data_set_name**|string|The name of the dataSet.|data_set_name|data_set_name|
-|**--kind**|choice|Kind of data set.|kind|kind|
-### datashare data-set delete
-
-delete a datashare data-set.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--data_set_name**|string|The name of the dataSet.|data_set_name|data_set_name|
-### datashare data-set list
-
-list a datashare data-set.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare data-set show
-
-show a datashare data-set.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--data_set_name**|string|The name of the dataSet.|data_set_name|data_set_name|
-### datashare data-set-mapping create
-
-create a datashare data-set-mapping.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--data_set_mapping_name**|string|The name of the dataSetMapping.|data_set_mapping_name|data_set_mapping_name|
-|**--kind**|choice|Kind of data set.|kind|kind|
-### datashare data-set-mapping delete
-
-delete a datashare data-set-mapping.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--data_set_mapping_name**|string|The name of the dataSetMapping.|data_set_mapping_name|data_set_mapping_name|
-### datashare data-set-mapping list
-
-list a datashare data-set-mapping.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare data-set-mapping show
-
-show a datashare data-set-mapping.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--data_set_mapping_name**|string|The name of the dataSetMapping.|data_set_mapping_name|data_set_mapping_name|
-### datashare invitation create
-
-create a datashare invitation.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--invitation_name**|string|The name of the invitation.|invitation_name|invitation_name|
-|**--target_active_directory_id**|string|The target Azure AD Id. Can't be combined with email.|target_active_directory_id|properties_target_active_directory_id|
-|**--target_email**|string|The email the invitation is directed to.|target_email|properties_target_email|
-|**--target_object_id**|string|The target user or application Id that invitation is being sent to. Must be specified along TargetActiveDirectoryId. This enables sending invitations to specific users or applications in an AD tenant.|target_object_id|properties_target_object_id|
-### datashare invitation delete
-
-delete a datashare invitation.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--invitation_name**|string|The name of the invitation.|invitation_name|invitation_name|
-### datashare invitation list
-
-list a datashare invitation.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare invitation show
-
-show a datashare invitation.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--invitation_name**|string|The name of the invitation.|invitation_name|invitation_name|
-### datashare provider-share-subscription list
-
-list a datashare provider-share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare provider-share-subscription reinstate
-
-reinstate a datashare provider-share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--provider_share_subscription_id**|string|To locate shareSubscription|provider_share_subscription_id|provider_share_subscription_id|
-### datashare provider-share-subscription revoke
-
-revoke a datashare provider-share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--provider_share_subscription_id**|string|To locate shareSubscription|provider_share_subscription_id|provider_share_subscription_id|
-### datashare provider-share-subscription show
-
-show a datashare provider-share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--provider_share_subscription_id**|string|To locate shareSubscription|provider_share_subscription_id|provider_share_subscription_id|
-### datashare share create
-
-create a datashare share.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--description**|string|Share description.|description|properties_description|
-|**--share_kind**|choice|Share kind.|share_kind|properties_share_kind|
-|**--terms**|string|Share terms.|terms|properties_terms|
-### datashare share delete
-
-delete a datashare share.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-### datashare share list
-
-list a datashare share.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare share list-synchronization
-
-list-synchronization a datashare share.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare share list-synchronization-detail
-
-list-synchronization-detail a datashare share.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-|**--consumer_email**|string|Email of the user who created the synchronization|consumer_email|consumer_email|
-|**--consumer_name**|string|Name of the user who created the synchronization|consumer_name|consumer_name|
-|**--consumer_tenant_name**|string|Tenant name of the consumer who created the synchronization|consumer_tenant_name|consumer_tenant_name|
-|**--duration_ms**|integer|synchronization duration|duration_ms|duration_ms|
-|**--end_time**|date-time|End time of synchronization|end_time|end_time|
-|**--message**|string|message of synchronization|message|message|
-|**--start_time**|date-time|start time of synchronization|start_time|start_time|
-|**--status**|string|Raw Status|status|status|
-|**--synchronization_id**|string|Synchronization id|synchronization_id|synchronization_id|
-### datashare share show
-
-show a datashare share.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-### datashare share-subscription cancel-synchronization
-
-cancel-synchronization a datashare share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--synchronization_id**|string|Synchronization id|synchronization_id|synchronization_id|
-### datashare share-subscription create
-
-create a datashare share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--invitation_id**|string|The invitation id.|invitation_id|properties_invitation_id|
-|**--source_share_location**|string|Source share location.|source_share_location|properties_source_share_location|
-### datashare share-subscription delete
-
-delete a datashare share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-### datashare share-subscription list
-
-list a datashare share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare share-subscription list-source-share-synchronization-setting
-
-list-source-share-synchronization-setting a datashare share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare share-subscription list-synchronization
-
-list-synchronization a datashare share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare share-subscription list-synchronization-detail
-
-list-synchronization-detail a datashare share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--synchronization_id**|string|Synchronization id|synchronization_id|synchronization_id|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare share-subscription show
-
-show a datashare share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-### datashare share-subscription synchronize
-
-synchronize a datashare share-subscription.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--synchronization_mode**|choice|Synchronization mode|synchronization_mode|synchronization_mode|
-### datashare synchronization-setting create
-
-create a datashare synchronization-setting.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--synchronization_setting_name**|string|The name of the synchronizationSetting.|synchronization_setting_name|synchronization_setting_name|
-|**--kind**|choice|Kind of data set.|kind|kind|
-### datashare synchronization-setting delete
-
-delete a datashare synchronization-setting.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--synchronization_setting_name**|string|The name of the synchronizationSetting.|synchronization_setting_name|synchronization_setting_name|
-### datashare synchronization-setting list
-
-list a datashare synchronization-setting.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare synchronization-setting show
-
-show a datashare synchronization-setting.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_name**|string|The name of the share.|share_name|share_name|
-|**--synchronization_setting_name**|string|The name of the synchronizationSetting.|synchronization_setting_name|synchronization_setting_name|
-### datashare trigger create
-
-create a datashare trigger.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--trigger_name**|string|The name of the trigger.|trigger_name|trigger_name|
-|**--kind**|choice|Kind of data set.|kind|kind|
-### datashare trigger delete
-
-delete a datashare trigger.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--trigger_name**|string|The name of the trigger.|trigger_name|trigger_name|
-### datashare trigger list
-
-list a datashare trigger.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--skip_token**|string|Continuation token|skip_token|skip_token|
-### datashare trigger show
-
-show a datashare trigger.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource_group_name**|string|The resource group name.|resource_group_name|resource_group_name|
-|**--account_name**|string|The name of the share account.|account_name|account_name|
-|**--share_subscription_name**|string|The name of the shareSubscription.|share_subscription_name|share_subscription_name|
-|**--trigger_name**|string|The name of the trigger.|trigger_name|trigger_name|
\ No newline at end of file
+# Azure CLI Module Creation Report
+
+## EXTENSION
+|CLI Extension|Command Groups|
+|---------|------------|
+|az datashare|[groups](#CommandGroups)|
+
+## GROUPS
+### Command groups in `az datashare` extension
+|CLI Command Group|Group Swagger name|Commands|
+|---------|------------|--------|
+|az datashare account|Accounts|[commands](#CommandsInAccounts)|
+|az datashare consumer-invitation|ConsumerInvitations|[commands](#CommandsInConsumerInvitations)|
+|az datashare data-set|DataSets|[commands](#CommandsInDataSets)|
+|az datashare data-set-mapping|DataSetMappings|[commands](#CommandsInDataSetMappings)|
+|az datashare invitation|Invitations|[commands](#CommandsInInvitations)|
+|az datashare|Shares|[commands](#CommandsInShares)|
+|az datashare provider-share-subscription|ProviderShareSubscriptions|[commands](#CommandsInProviderShareSubscriptions)|
+|az datashare share-subscription|ShareSubscriptions|[commands](#CommandsInShareSubscriptions)|
+|az datashare consumer-source-data-set|ConsumerSourceDataSets|[commands](#CommandsInConsumerSourceDataSets)|
+|az datashare synchronization-setting|SynchronizationSettings|[commands](#CommandsInSynchronizationSettings)|
+|az datashare trigger|Triggers|[commands](#CommandsInTriggers)|
+
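+To explore these groups interactively, the built-in CLI help can be used once the extension is installed. This is a usage sketch and assumes the extension is published under the name `datashare`.
+```
+az extension add --name datashare
+az datashare --help
+az datashare share-subscription --help
+```
+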
+## COMMANDS
+### Commands in `az datashare` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datashare list](#SharesListByAccount)|ListByAccount|[Parameters](#ParametersSharesListByAccount)|[Example](#ExamplesSharesListByAccount)|
+|[az datashare show](#SharesGet)|Get|[Parameters](#ParametersSharesGet)|[Example](#ExamplesSharesGet)|
+|[az datashare create](#SharesCreate)|Create|[Parameters](#ParametersSharesCreate)|[Example](#ExamplesSharesCreate)|
+|[az datashare delete](#SharesDelete)|Delete|[Parameters](#ParametersSharesDelete)|[Example](#ExamplesSharesDelete)|
+|[az datashare list-synchronization](#SharesListSynchronizations)|ListSynchronizations|[Parameters](#ParametersSharesListSynchronizations)|[Example](#ExamplesSharesListSynchronizations)|
+|[az datashare list-synchronization-detail](#SharesListSynchronizationDetails)|ListSynchronizationDetails|[Parameters](#ParametersSharesListSynchronizationDetails)|[Example](#ExamplesSharesListSynchronizationDetails)|
+
+### Commands in `az datashare account` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datashare account list](#AccountsListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersAccountsListByResourceGroup)|[Example](#ExamplesAccountsListByResourceGroup)|
+|[az datashare account list](#AccountsListBySubscription)|ListBySubscription|[Parameters](#ParametersAccountsListBySubscription)|[Example](#ExamplesAccountsListBySubscription)|
+|[az datashare account show](#AccountsGet)|Get|[Parameters](#ParametersAccountsGet)|[Example](#ExamplesAccountsGet)|
+|[az datashare account create](#AccountsCreate)|Create|[Parameters](#ParametersAccountsCreate)|[Example](#ExamplesAccountsCreate)|
+|[az datashare account update](#AccountsUpdate)|Update|[Parameters](#ParametersAccountsUpdate)|[Example](#ExamplesAccountsUpdate)|
+|[az datashare account delete](#AccountsDelete)|Delete|[Parameters](#ParametersAccountsDelete)|[Example](#ExamplesAccountsDelete)|
+
+### Commands in `az datashare consumer-invitation` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datashare consumer-invitation show](#ConsumerInvitationsGet)|Get|[Parameters](#ParametersConsumerInvitationsGet)|[Example](#ExamplesConsumerInvitationsGet)|
+|[az datashare consumer-invitation list-invitation](#ConsumerInvitationsListInvitations)|ListInvitations|[Parameters](#ParametersConsumerInvitationsListInvitations)|[Example](#ExamplesConsumerInvitationsListInvitations)|
+|[az datashare consumer-invitation reject-invitation](#ConsumerInvitationsRejectInvitation)|RejectInvitation|[Parameters](#ParametersConsumerInvitationsRejectInvitation)|[Example](#ExamplesConsumerInvitationsRejectInvitation)|
+
+### Commands in `az datashare consumer-source-data-set` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datashare consumer-source-data-set list](#ConsumerSourceDataSetsListByShareSubscription)|ListByShareSubscription|[Parameters](#ParametersConsumerSourceDataSetsListByShareSubscription)|[Example](#ExamplesConsumerSourceDataSetsListByShareSubscription)|
+
+### Commands in `az datashare data-set` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datashare data-set list](#DataSetsListByShare)|ListByShare|[Parameters](#ParametersDataSetsListByShare)|[Example](#ExamplesDataSetsListByShare)|
+|[az datashare data-set show](#DataSetsGet)|Get|[Parameters](#ParametersDataSetsGet)|[Example](#ExamplesDataSetsGet)|
+|[az datashare data-set create](#DataSetsCreate)|Create|[Parameters](#ParametersDataSetsCreate)|[Example](#ExamplesDataSetsCreate)|
+|[az datashare data-set delete](#DataSetsDelete)|Delete|[Parameters](#ParametersDataSetsDelete)|[Example](#ExamplesDataSetsDelete)|
+
+### Commands in `az datashare data-set-mapping` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datashare data-set-mapping list](#DataSetMappingsListByShareSubscription)|ListByShareSubscription|[Parameters](#ParametersDataSetMappingsListByShareSubscription)|[Example](#ExamplesDataSetMappingsListByShareSubscription)|
+|[az datashare data-set-mapping show](#DataSetMappingsGet)|Get|[Parameters](#ParametersDataSetMappingsGet)|[Example](#ExamplesDataSetMappingsGet)|
+|[az datashare data-set-mapping create](#DataSetMappingsCreate)|Create|[Parameters](#ParametersDataSetMappingsCreate)|[Example](#ExamplesDataSetMappingsCreate)|
+|[az datashare data-set-mapping delete](#DataSetMappingsDelete)|Delete|[Parameters](#ParametersDataSetMappingsDelete)|[Example](#ExamplesDataSetMappingsDelete)|
+
+### Commands in `az datashare invitation` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datashare invitation list](#InvitationsListByShare)|ListByShare|[Parameters](#ParametersInvitationsListByShare)|[Example](#ExamplesInvitationsListByShare)|
+|[az datashare invitation show](#InvitationsGet)|Get|[Parameters](#ParametersInvitationsGet)|[Example](#ExamplesInvitationsGet)|
+|[az datashare invitation create](#InvitationsCreate)|Create|[Parameters](#ParametersInvitationsCreate)|[Example](#ExamplesInvitationsCreate)|
+|[az datashare invitation delete](#InvitationsDelete)|Delete|[Parameters](#ParametersInvitationsDelete)|[Example](#ExamplesInvitationsDelete)|
+
+### Commands in `az datashare provider-share-subscription` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datashare provider-share-subscription list](#ProviderShareSubscriptionsListByShare)|ListByShare|[Parameters](#ParametersProviderShareSubscriptionsListByShare)|[Example](#ExamplesProviderShareSubscriptionsListByShare)|
+|[az datashare provider-share-subscription show](#ProviderShareSubscriptionsGetByShare)|GetByShare|[Parameters](#ParametersProviderShareSubscriptionsGetByShare)|[Example](#ExamplesProviderShareSubscriptionsGetByShare)|
+|[az datashare provider-share-subscription adjust](#ProviderShareSubscriptionsAdjust)|Adjust|[Parameters](#ParametersProviderShareSubscriptionsAdjust)|[Example](#ExamplesProviderShareSubscriptionsAdjust)|
+|[az datashare provider-share-subscription reinstate](#ProviderShareSubscriptionsReinstate)|Reinstate|[Parameters](#ParametersProviderShareSubscriptionsReinstate)|[Example](#ExamplesProviderShareSubscriptionsReinstate)|
+|[az datashare provider-share-subscription revoke](#ProviderShareSubscriptionsRevoke)|Revoke|[Parameters](#ParametersProviderShareSubscriptionsRevoke)|[Example](#ExamplesProviderShareSubscriptionsRevoke)|
+
+### Commands in `az datashare share-subscription` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datashare share-subscription list](#ShareSubscriptionsListByAccount)|ListByAccount|[Parameters](#ParametersShareSubscriptionsListByAccount)|[Example](#ExamplesShareSubscriptionsListByAccount)|
+|[az datashare share-subscription show](#ShareSubscriptionsGet)|Get|[Parameters](#ParametersShareSubscriptionsGet)|[Example](#ExamplesShareSubscriptionsGet)|
+|[az datashare share-subscription create](#ShareSubscriptionsCreate)|Create|[Parameters](#ParametersShareSubscriptionsCreate)|[Example](#ExamplesShareSubscriptionsCreate)|
+|[az datashare share-subscription delete](#ShareSubscriptionsDelete)|Delete|[Parameters](#ParametersShareSubscriptionsDelete)|[Example](#ExamplesShareSubscriptionsDelete)|
+|[az datashare share-subscription cancel-synchronization](#ShareSubscriptionsCancelSynchronization)|CancelSynchronization|[Parameters](#ParametersShareSubscriptionsCancelSynchronization)|[Example](#ExamplesShareSubscriptionsCancelSynchronization)|
+|[az datashare share-subscription list-source-share-synchronization-setting](#ShareSubscriptionsListSourceShareSynchronizationSettings)|ListSourceShareSynchronizationSettings|[Parameters](#ParametersShareSubscriptionsListSourceShareSynchronizationSettings)|[Example](#ExamplesShareSubscriptionsListSourceShareSynchronizationSettings)|
+|[az datashare share-subscription list-synchronization](#ShareSubscriptionsListSynchronizations)|ListSynchronizations|[Parameters](#ParametersShareSubscriptionsListSynchronizations)|[Example](#ExamplesShareSubscriptionsListSynchronizations)|
+|[az datashare share-subscription list-synchronization-detail](#ShareSubscriptionsListSynchronizationDetails)|ListSynchronizationDetails|[Parameters](#ParametersShareSubscriptionsListSynchronizationDetails)|[Example](#ExamplesShareSubscriptionsListSynchronizationDetails)|
+|[az datashare share-subscription synchronize](#ShareSubscriptionsSynchronize)|Synchronize|[Parameters](#ParametersShareSubscriptionsSynchronize)|[Example](#ExamplesShareSubscriptionsSynchronize)|
+
+### Commands in `az datashare synchronization-setting` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datashare synchronization-setting list](#SynchronizationSettingsListByShare)|ListByShare|[Parameters](#ParametersSynchronizationSettingsListByShare)|[Example](#ExamplesSynchronizationSettingsListByShare)|
+|[az datashare synchronization-setting show](#SynchronizationSettingsGet)|Get|[Parameters](#ParametersSynchronizationSettingsGet)|[Example](#ExamplesSynchronizationSettingsGet)|
+|[az datashare synchronization-setting create](#SynchronizationSettingsCreate)|Create|[Parameters](#ParametersSynchronizationSettingsCreate)|[Example](#ExamplesSynchronizationSettingsCreate)|
+|[az datashare synchronization-setting delete](#SynchronizationSettingsDelete)|Delete|[Parameters](#ParametersSynchronizationSettingsDelete)|[Example](#ExamplesSynchronizationSettingsDelete)|
+
+### Commands in `az datashare trigger` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datashare trigger list](#TriggersListByShareSubscription)|ListByShareSubscription|[Parameters](#ParametersTriggersListByShareSubscription)|[Example](#ExamplesTriggersListByShareSubscription)|
+|[az datashare trigger show](#TriggersGet)|Get|[Parameters](#ParametersTriggersGet)|[Example](#ExamplesTriggersGet)|
+|[az datashare trigger create](#TriggersCreate)|Create|[Parameters](#ParametersTriggersCreate)|[Example](#ExamplesTriggersCreate)|
+|[az datashare trigger delete](#TriggersDelete)|Delete|[Parameters](#ParametersTriggersDelete)|[Example](#ExamplesTriggersDelete)|
+
+
+## COMMAND DETAILS
+
+### group `az datashare`
+#### Command `az datashare list`
+
+##### Example
+```
+az datashare list --account-name "Account1" --resource-group "SampleResourceGroup"
+```
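+##### Example
+An editorial sketch showing the OData `--filter` and `--orderby` options from the parameter list below; which share properties are filterable is an assumption here.
+```
+az datashare list --account-name "Account1" --resource-group "SampleResourceGroup" --filter "name eq 'Share1'" \
+--orderby "name"
+```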
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--skip-token**|string|Continuation Token|skip_token|$skipToken|
+|**--filter**|string|Filters the results using OData syntax.|filter|$filter|
+|**--orderby**|string|Sorts the results using OData syntax.|orderby|$orderby|
+
+#### Command `az datashare show`
+
+##### Example
+```
+az datashare show --account-name "Account1" --resource-group "SampleResourceGroup" --name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share to retrieve.|share_name|shareName|
+
+#### Command `az datashare create`
+
+##### Example
+```
+az datashare create --account-name "Account1" --resource-group "SampleResourceGroup" --description "share description" \
+--share-kind "CopyBased" --terms "Confidential" --name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--description**|string|Share description.|description|description|
+|**--share-kind**|choice|Share kind.|share_kind|shareKind|
+|**--terms**|string|Share terms.|terms|terms|
+
+#### Command `az datashare delete`
+
+##### Example
+```
+az datashare delete --account-name "Account1" --resource-group "SampleResourceGroup" --name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+
+#### Command `az datashare list-synchronization`
+
+##### Example
+```
+az datashare list-synchronization --account-name "Account1" --resource-group "SampleResourceGroup" --name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--skip-token**|string|Continuation token|skip_token|$skipToken|
+|**--filter**|string|Filters the results using OData syntax.|filter|$filter|
+|**--orderby**|string|Sorts the results using OData syntax.|orderby|$orderby|
+
+#### Command `az datashare list-synchronization-detail`
+
+##### Example
+```
+az datashare list-synchronization-detail --account-name "Account1" --resource-group "SampleResourceGroup" --name \
+"Share1" --synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--skip-token**|string|Continuation token|skip_token|$skipToken|
+|**--filter**|string|Filters the results using OData syntax.|filter|$filter|
+|**--orderby**|string|Sorts the results using OData syntax.|orderby|$orderby|
+|**--consumer-email**|string|Email of the user who created the synchronization|consumer_email|consumerEmail|
+|**--consumer-name**|string|Name of the user who created the synchronization|consumer_name|consumerName|
+|**--consumer-tenant-name**|string|Tenant name of the consumer who created the synchronization|consumer_tenant_name|consumerTenantName|
+|**--duration-ms**|integer|synchronization duration|duration_ms|durationMs|
+|**--end-time**|date-time|End time of synchronization|end_time|endTime|
+|**--message**|string|message of synchronization|message|message|
+|**--start-time**|date-time|start time of synchronization|start_time|startTime|
+|**--status**|string|Raw Status|status|status|
+|**--synchronization-id**|string|Synchronization id|synchronization_id|synchronizationId|
+
+### group `az datashare account`
+#### Command `az datashare account list`
+
+##### Example
+```
+az datashare account list --resource-group "SampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--skip-token**|string|Continuation token|skip_token|$skipToken|
+
+#### Command `az datashare account list`
+
+##### Example
+```
+az datashare account list
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+
+#### Command `az datashare account show`
+
+##### Example
+```
+az datashare account show --name "Account1" --resource-group "SampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+
+#### Command `az datashare account create`
+
+##### Example
+```
+az datashare account create --location "West US 2" --tags tag1="Red" tag2="White" --name "Account1" --resource-group \
+"SampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--location**|string|Location of the azure resource.|location|location|
+|**--tags**|dictionary|Tags on the azure resource.|tags|tags|
+
+#### Command `az datashare account update`
+
+##### Example
+```
+az datashare account update --name "Account1" --tags tag1="Red" tag2="White" --resource-group "SampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--tags**|dictionary|Tags on the azure resource.|tags|tags|
+
+#### Command `az datashare account delete`
+
+##### Example
+```
+az datashare account delete --name "Account1" --resource-group "SampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+
+### group `az datashare consumer-invitation`
+#### Command `az datashare consumer-invitation show`
+
+##### Example
+```
+az datashare consumer-invitation show --invitation-id "dfbbc788-19eb-4607-a5a1-c74181bfff03" --location "East US 2"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|Location of the invitation|location|location|
+|**--invitation-id**|string|An invitation id|invitation_id|invitationId|
+
+#### Command `az datashare consumer-invitation list-invitation`
+
+##### Example
+```
+az datashare consumer-invitation list-invitation
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--skip-token**|string|The continuation token|skip_token|$skipToken|
+
+#### Command `az datashare consumer-invitation reject-invitation`
+
+##### Example
+```
+az datashare consumer-invitation reject-invitation --invitation-id "dfbbc788-19eb-4607-a5a1-c74181bfff03" --location \
+"East US 2"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|Location of the invitation|location|location|
+|**--invitation-id**|string|Unique id of the invitation.|invitation_id|invitationId|
+
+### group `az datashare consumer-source-data-set`
+#### Command `az datashare consumer-source-data-set list`
+
+##### Example
+```
+az datashare consumer-source-data-set list --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the shareSubscription.|share_subscription_name|shareSubscriptionName|
+|**--skip-token**|string|Continuation token|skip_token|$skipToken|
+
+### group `az datashare data-set`
+#### Command `az datashare data-set list`
+
+##### Example
+```
+az datashare data-set list --account-name "Account1" --resource-group "SampleResourceGroup" --share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--skip-token**|string|continuation token|skip_token|$skipToken|
+|**--filter**|string|Filters the results using OData syntax.|filter|$filter|
+|**--orderby**|string|Sorts the results using OData syntax.|orderby|$orderby|
+
+#### Command `az datashare data-set show`
+
+##### Example
+```
+az datashare data-set show --account-name "Account1" --name "Dataset1" --resource-group "SampleResourceGroup" \
+--share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--data-set-name**|string|The name of the dataSet.|data_set_name|dataSetName|
+
+#### Command `az datashare data-set create`
+
+##### Example
+```
+az datashare data-set create --account-name "Account1" --blob-data-set container-name="C1" file-path="file21" \
+resource-group="SampleResourceGroup" storage-account-name="storage2" subscription-id="433a8dfd-e5d5-4e77-ad86-90acdc75e\
+b1a" --name "Dataset1" --resource-group "SampleResourceGroup" --share-name "Share1"
+```
+##### Example
+```
+az datashare data-set create --account-name "Account1" --kusto-cluster-data-set kusto-cluster-resource-id="/subscriptio\
+ns/433a8dfd-e5d5-4e77-ad86-90acdc75eb1a/resourceGroups/SampleResourceGroup/providers/Microsoft.Kusto/clusters/Cluster1"\
+ --name "Dataset1" --resource-group "SampleResourceGroup" --share-name "Share1"
+```
+##### Example
+```
+az datashare data-set create --account-name "Account1" --kusto-database-data-set kusto-database-resource-id="/subscript\
+ions/433a8dfd-e5d5-4e77-ad86-90acdc75eb1a/resourceGroups/SampleResourceGroup/providers/Microsoft.Kusto/clusters/Cluster\
+1/databases/Database1" --name "Dataset1" --resource-group "SampleResourceGroup" --share-name "Share1"
+```
+##### Example
+```
+az datashare data-set create --account-name "Account1" --sqldb-table-data-set database-name="SqlDB1" schema-name="dbo" \
+sql-server-resource-id="/subscriptions/433a8dfd-e5d5-4e77-ad86-90acdc75eb1a/resourceGroups/SampleResourceGroup/provider\
+s/Microsoft.Sql/servers/Server1" table-name="Table1" --name "Dataset1" --resource-group "SampleResourceGroup" \
+--share-name "Share1"
+```
+##### Example
+```
+az datashare data-set create --account-name "Account1" --sqldw-table-data-set data-warehouse-name="DataWarehouse1" \
+schema-name="dbo" sql-server-resource-id="/subscriptions/433a8dfd-e5d5-4e77-ad86-90acdc75eb1a/resourceGroups/SampleReso\
+urceGroup/providers/Microsoft.Sql/servers/Server1" table-name="Table1" --name "Dataset1" --resource-group \
+"SampleResourceGroup" --share-name "Share1"
+```
+##### Example
+```
+az datashare data-set create --account-name "sourceAccount" --synapse-workspace-sql-pool-table-data-set \
+synapse-workspace-sql-pool-table-resource-id="/subscriptions/0f3dcfc3-18f8-4099-b381-8353e19d43a7/resourceGroups/Sample\
+ResourceGroup/providers/Microsoft.Synapse/workspaces/ExampleWorkspace/sqlPools/ExampleSqlPool/schemas/dbo/tables/table1\
+" --name "dataset1" --resource-group "SampleResourceGroup" --share-name "share1"
+```
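+##### Example
+The blob folder and blob container data set kinds in the parameter list below have no generated example; this sketch covers the folder case, reuses the shorthand keys shown for `--blob-data-set` above, and assumes the folder path is given by a `prefix` key.
+```
+az datashare data-set create --account-name "Account1" --blob-folder-data-set container-name="C1" prefix="folder1" \
+resource-group="SampleResourceGroup" storage-account-name="storage2" subscription-id="433a8dfd-e5d5-4e77-ad86-90acdc75eb1a" \
+--name "Dataset1" --resource-group "SampleResourceGroup" --share-name "Share1"
+```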
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share to add the data set to.|share_name|shareName|
+|**--data-set-name**|string|The name of the dataSet.|data_set_name|dataSetName|
+|**--adls-gen1-file-data-set**|object|An ADLS Gen 1 file data set.|adls_gen1_file_data_set|ADLSGen1FileDataSet|
+|**--adls-gen1-folder-data-set**|object|An ADLS Gen 1 folder data set.|adls_gen1_folder_data_set|ADLSGen1FolderDataSet|
+|**--adls-gen2-file-data-set**|object|An ADLS Gen 2 file data set.|adls_gen2_file_data_set|ADLSGen2FileDataSet|
+|**--adls-gen2-file-system-data-set**|object|An ADLS Gen 2 file system data set.|adls_gen2_file_system_data_set|ADLSGen2FileSystemDataSet|
+|**--adls-gen2-folder-data-set**|object|An ADLS Gen 2 folder data set.|adls_gen2_folder_data_set|ADLSGen2FolderDataSet|
+|**--blob-container-data-set**|object|An Azure storage blob container data set.|blob_container_data_set|BlobContainerDataSet|
+|**--blob-data-set**|object|An Azure storage blob data set.|blob_data_set|BlobDataSet|
+|**--blob-folder-data-set**|object|An Azure storage blob folder data set.|blob_folder_data_set|BlobFolderDataSet|
+|**--kusto-cluster-data-set**|object|A kusto cluster data set.|kusto_cluster_data_set|KustoClusterDataSet|
+|**--kusto-database-data-set**|object|A kusto database data set.|kusto_database_data_set|KustoDatabaseDataSet|
+|**--sqldb-table-data-set**|object|A SQL DB table data set.|sqldb_table_data_set|SqlDBTableDataSet|
+|**--sqldw-table-data-set**|object|A SQL DW table data set.|sqldw_table_data_set|SqlDWTableDataSet|
+|**--synapse-workspace-sql-pool-table-data-set**|object|A Synapse Workspace Sql Pool Table data set.|synapse_workspace_sql_pool_table_data_set|SynapseWorkspaceSqlPoolTableDataSet|
+
+#### Command `az datashare data-set delete`
+
+##### Example
+```
+az datashare data-set delete --account-name "Account1" --name "Dataset1" --resource-group "SampleResourceGroup" \
+--share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--data-set-name**|string|The name of the dataSet.|data_set_name|dataSetName|
+
+### group `az datashare data-set-mapping`
+#### Command `az datashare data-set-mapping list`
+
+##### Example
+```
+az datashare data-set-mapping list --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the share subscription.|share_subscription_name|shareSubscriptionName|
+|**--skip-token**|string|Continuation token|skip_token|$skipToken|
+|**--filter**|string|Filters the results using OData syntax.|filter|$filter|
+|**--orderby**|string|Sorts the results using OData syntax.|orderby|$orderby|
+
+#### Command `az datashare data-set-mapping show`
+
+##### Example
+```
+az datashare data-set-mapping show --account-name "Account1" --name "DatasetMapping1" --resource-group \
+"SampleResourceGroup" --share-subscription-name "ShareSubscription1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the shareSubscription.|share_subscription_name|shareSubscriptionName|
+|**--data-set-mapping-name**|string|The name of the dataSetMapping.|data_set_mapping_name|dataSetMappingName|
+
+#### Command `az datashare data-set-mapping create`
+
+##### Example
+```
+az datashare data-set-mapping create --account-name "Account1" --blob-data-set-mapping container-name="C1" \
+data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" file-path="file21" resource-group="SampleResourceGroup" \
+storage-account-name="storage2" subscription-id="433a8dfd-e5d5-4e77-ad86-90acdc75eb1a" --name "DatasetMapping1" \
+--resource-group "SampleResourceGroup" --share-subscription-name "ShareSubscription1"
+```
+##### Example
+```
+az datashare data-set-mapping create --account-name "Account1" --sqldb-table-data-set-mapping \
+database-name="Database1" data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" schema-name="dbo" \
+sql-server-resource-id="/subscriptions/433a8dfd-e5d5-4e77-ad86-90acdc75eb1a/resourceGroups/SampleResourceGroup/provider\
+s/Microsoft.Sql/servers/Server1" table-name="Table1" --name "DatasetMapping1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1"
+```
+##### Example
+```
+az datashare data-set-mapping create --account-name "Account1" --adls-gen2-file-data-set-mapping \
+data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" file-path="file21" file-system="fileSystem" output-type="Csv" \
+resource-group="SampleResourceGroup" storage-account-name="storage2" subscription-id="433a8dfd-e5d5-4e77-ad86-90acdc75e\
+b1a" --name "DatasetMapping1" --resource-group "SampleResourceGroup" --share-subscription-name "ShareSubscription1"
+```
+##### Example
+```
+az datashare data-set-mapping create --account-name "Account1" --sqldw-table-data-set-mapping \
+data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" data-warehouse-name="DataWarehouse1" schema-name="dbo" \
+sql-server-resource-id="/subscriptions/433a8dfd-e5d5-4e77-ad86-90acdc75eb1a/resourceGroups/SampleResourceGroup/provider\
+s/Microsoft.Sql/servers/Server1" table-name="Table1" --name "DatasetMapping1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1"
+```
+##### Example
+```
+az datashare data-set-mapping create --account-name "consumerAccount" --synapse-workspace-sql-pool-table-data-set-mappi\
+ng data-set-id="3dc64e49-1fc3-4186-b3dc-d388c4d3076a" synapse-workspace-sql-pool-table-resource-id="/subscriptions/0f3d\
+cfc3-18f8-4099-b381-8353e19d43a7/resourceGroups/SampleResourceGroup/providers/Microsoft.Synapse/workspaces/ExampleWorks\
+pace/sqlPools/ExampleSqlPool/schemas/dbo/tables/table1" --name "datasetMappingName1" --resource-group \
+"SampleResourceGroup" --share-subscription-name "ShareSubscription1"
+```
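+##### Example
+No generated example covers the container-level mappings; this sketch assumes `--blob-container-data-set-mapping` accepts the same shorthand keys as `--blob-data-set-mapping` above, minus the file path.
+```
+az datashare data-set-mapping create --account-name "Account1" --blob-container-data-set-mapping container-name="C1" \
+data-set-id="a08f184b-0567-4b11-ba22-a1199336d226" resource-group="SampleResourceGroup" \
+storage-account-name="storage2" subscription-id="433a8dfd-e5d5-4e77-ad86-90acdc75eb1a" --name "DatasetMapping1" \
+--resource-group "SampleResourceGroup" --share-subscription-name "ShareSubscription1"
+```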
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the share subscription which will hold the data set sink.|share_subscription_name|shareSubscriptionName|
+|**--data-set-mapping-name**|string|The name of the data set mapping to be created.|data_set_mapping_name|dataSetMappingName|
+|**--adls-gen2-file-data-set-mapping**|object|An ADLS Gen2 file data set mapping.|adls_gen2_file_data_set_mapping|ADLSGen2FileDataSetMapping|
+|**--adls-gen2-file-system-data-set-mapping**|object|An ADLS Gen2 file system data set mapping.|adls_gen2_file_system_data_set_mapping|ADLSGen2FileSystemDataSetMapping|
+|**--adls-gen2-folder-data-set-mapping**|object|An ADLS Gen2 folder data set mapping.|adls_gen2_folder_data_set_mapping|ADLSGen2FolderDataSetMapping|
+|**--blob-container-data-set-mapping**|object|A Blob container data set mapping.|blob_container_data_set_mapping|BlobContainerDataSetMapping|
+|**--blob-data-set-mapping**|object|A Blob data set mapping.|blob_data_set_mapping|BlobDataSetMapping|
+|**--blob-folder-data-set-mapping**|object|A Blob folder data set mapping.|blob_folder_data_set_mapping|BlobFolderDataSetMapping|
+|**--kusto-cluster-data-set-mapping**|object|A Kusto cluster data set mapping|kusto_cluster_data_set_mapping|KustoClusterDataSetMapping|
+|**--kusto-database-data-set-mapping**|object|A Kusto database data set mapping|kusto_database_data_set_mapping|KustoDatabaseDataSetMapping|
+|**--sqldb-table-data-set-mapping**|object|A SQL DB Table data set mapping.|sqldb_table_data_set_mapping|SqlDBTableDataSetMapping|
+|**--sqldw-table-data-set-mapping**|object|A SQL DW Table data set mapping.|sqldw_table_data_set_mapping|SqlDWTableDataSetMapping|
+|**--synapse-workspace-sql-pool-table-data-set-mapping**|object|A Synapse Workspace Sql Pool Table data set mapping|synapse_workspace_sql_pool_table_data_set_mapping|SynapseWorkspaceSqlPoolTableDataSetMapping|
+
+#### Command `az datashare data-set-mapping delete`
+
+##### Example
+```
+az datashare data-set-mapping delete --account-name "Account1" --name "DatasetMapping1" --resource-group \
+"SampleResourceGroup" --share-subscription-name "ShareSubscription1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the shareSubscription.|share_subscription_name|shareSubscriptionName|
+|**--data-set-mapping-name**|string|The name of the dataSetMapping.|data_set_mapping_name|dataSetMappingName|
+
+### group `az datashare invitation`
+#### Command `az datashare invitation list`
+
+##### Example
+```
+az datashare invitation list --account-name "Account1" --resource-group "SampleResourceGroup" --share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--skip-token**|string|The continuation token|skip_token|$skipToken|
+|**--filter**|string|Filters the results using OData syntax.|filter|$filter|
+|**--orderby**|string|Sorts the results using OData syntax.|orderby|$orderby|
+
+#### Command `az datashare invitation show`
+
+##### Example
+```
+az datashare invitation show --account-name "Account1" --name "Invitation1" --resource-group "SampleResourceGroup" \
+--share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--invitation-name**|string|The name of the invitation.|invitation_name|invitationName|
+
+#### Command `az datashare invitation create`
+
+##### Example
+```
+az datashare invitation create --account-name "Account1" --expiration-date "2020-08-26T22:33:24.5785265Z" \
+--target-email "receiver@microsoft.com" --name "Invitation1" --resource-group "SampleResourceGroup" --share-name \
+"Share1"
+```
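+##### Example
+Invitations can also target an Azure AD tenant and object id instead of an email address (see `--target-active-directory-id` and `--target-object-id` below); the ids in this sketch are placeholders.
+```
+az datashare invitation create --account-name "Account1" --target-active-directory-id "<tenant-id>" \
+--target-object-id "<object-id>" --name "Invitation1" --resource-group "SampleResourceGroup" --share-name "Share1"
+```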
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share to send the invitation for.|share_name|shareName|
+|**--invitation-name**|string|The name of the invitation.|invitation_name|invitationName|
+|**--expiration-date**|date-time|The expiration date for the invitation and share subscription.|expiration_date|expirationDate|
+|**--target-active-directory-id**|string|The target Azure AD Id. Can't be combined with email.|target_active_directory_id|targetActiveDirectoryId|
+|**--target-email**|string|The email the invitation is directed to.|target_email|targetEmail|
+|**--target-object-id**|string|The target user or application Id that the invitation is being sent to. Must be specified along with TargetActiveDirectoryId. This enables sending invitations to specific users or applications in an AD tenant.|target_object_id|targetObjectId|
+
+#### Command `az datashare invitation delete`
+
+##### Example
+```
+az datashare invitation delete --account-name "Account1" --name "Invitation1" --resource-group "SampleResourceGroup" \
+--share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--invitation-name**|string|The name of the invitation.|invitation_name|invitationName|
+
+### group `az datashare provider-share-subscription`
+#### Command `az datashare provider-share-subscription list`
+
+##### Example
+```
+az datashare provider-share-subscription list --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--skip-token**|string|Continuation Token|skip_token|$skipToken|
+
+#### Command `az datashare provider-share-subscription show`
+
+##### Example
+```
+az datashare provider-share-subscription show --account-name "Account1" --provider-share-subscription-id \
+"4256e2cf-0f82-4865-961b-12f83333f487" --resource-group "SampleResourceGroup" --share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--provider-share-subscription-id**|string|To locate shareSubscription|provider_share_subscription_id|providerShareSubscriptionId|
+
+#### Command `az datashare provider-share-subscription adjust`
+
+##### Example
+```
+az datashare provider-share-subscription adjust --account-name "Account1" --expiration-date \
+"2020-12-26T22:33:24.5785265Z" --provider-share-subscription-id "4256e2cf-0f82-4865-961b-12f83333f487" \
+--resource-group "SampleResourceGroup" --share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--provider-share-subscription-id**|string|To locate shareSubscription|provider_share_subscription_id|providerShareSubscriptionId|
+|**--expiration-date**|date-time|Expiration date of the share subscription in UTC format|expiration_date|expirationDate|
+
+#### Command `az datashare provider-share-subscription reinstate`
+
+##### Example
+```
+az datashare provider-share-subscription reinstate --account-name "Account1" --expiration-date \
+"2020-12-26T22:33:24.5785265Z" --provider-share-subscription-id "4256e2cf-0f82-4865-961b-12f83333f487" \
+--resource-group "SampleResourceGroup" --share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--provider-share-subscription-id**|string|To locate shareSubscription|provider_share_subscription_id|providerShareSubscriptionId|
+|**--expiration-date**|date-time|Expiration date of the share subscription in UTC format|expiration_date|expirationDate|
+
+#### Command `az datashare provider-share-subscription revoke`
+
+##### Example
+```
+az datashare provider-share-subscription revoke --account-name "Account1" --provider-share-subscription-id \
+"4256e2cf-0f82-4865-961b-12f83333f487" --resource-group "SampleResourceGroup" --share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--provider-share-subscription-id**|string|To locate shareSubscription|provider_share_subscription_id|providerShareSubscriptionId|
+
+### group `az datashare share-subscription`
+#### Command `az datashare share-subscription list`
+
+##### Example
+```
+az datashare share-subscription list --account-name "Account1" --resource-group "SampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--skip-token**|string|Continuation Token|skip_token|$skipToken|
+|**--filter**|string|Filters the results using OData syntax.|filter|$filter|
+|**--orderby**|string|Sorts the results using OData syntax.|orderby|$orderby|
+
+#### Command `az datashare share-subscription show`
+
+##### Example
+```
+az datashare share-subscription show --account-name "Account1" --resource-group "SampleResourceGroup" --name \
+"ShareSubscription1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the shareSubscription.|share_subscription_name|shareSubscriptionName|
+
+#### Command `az datashare share-subscription create`
+
+##### Example
+```
+az datashare share-subscription create --account-name "Account1" --resource-group "SampleResourceGroup" \
+--expiration-date "2020-08-26T22:33:24.5785265Z" --invitation-id "12345678-1234-1234-12345678abd" \
+--source-share-location "eastus2" --name "ShareSubscription1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the shareSubscription.|share_subscription_name|shareSubscriptionName|
+|**--invitation-id**|string|The invitation id.|invitation_id|invitationId|
+|**--source-share-location**|string|Source share location.|source_share_location|sourceShareLocation|
+|**--expiration-date**|date-time|The expiration date of the share subscription.|expiration_date|expirationDate|
+
+#### Command `az datashare share-subscription delete`
+
+##### Example
+```
+az datashare share-subscription delete --account-name "Account1" --resource-group "SampleResourceGroup" --name \
+"ShareSubscription1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the shareSubscription.|share_subscription_name|shareSubscriptionName|
+
+#### Command `az datashare share-subscription cancel-synchronization`
+
+##### Example
+```
+az datashare share-subscription cancel-synchronization --account-name "Account1" --resource-group \
+"SampleResourceGroup" --name "ShareSubscription1" --synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the shareSubscription.|share_subscription_name|shareSubscriptionName|
+|**--synchronization-id**|string|Synchronization id|synchronization_id|synchronizationId|
+
+#### Command `az datashare share-subscription list-source-share-synchronization-setting`
+
+##### Example
+```
+az datashare share-subscription list-source-share-synchronization-setting --account-name "Account1" --resource-group \
+"SampleResourceGroup" --name "ShareSub1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the shareSubscription.|share_subscription_name|shareSubscriptionName|
+|**--skip-token**|string|Continuation token|skip_token|$skipToken|
+
+#### Command `az datashare share-subscription list-synchronization`
+
+##### Example
+```
+az datashare share-subscription list-synchronization --account-name "Account1" --resource-group "SampleResourceGroup" \
+--name "ShareSub1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the share subscription.|share_subscription_name|shareSubscriptionName|
+|**--skip-token**|string|Continuation token|skip_token|$skipToken|
+|**--filter**|string|Filters the results using OData syntax.|filter|$filter|
+|**--orderby**|string|Sorts the results using OData syntax.|orderby|$orderby|
+
+#### Command `az datashare share-subscription list-synchronization-detail`
+
+##### Example
+```
+az datashare share-subscription list-synchronization-detail --account-name "Account1" --resource-group \
+"SampleResourceGroup" --name "ShareSub1" --synchronization-id "7d0536a6-3fa5-43de-b152-3d07c4f6b2bb"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the share subscription.|share_subscription_name|shareSubscriptionName|
+|**--synchronization-id**|string|Synchronization id|synchronization_id|synchronizationId|
+|**--skip-token**|string|Continuation token|skip_token|$skipToken|
+|**--filter**|string|Filters the results using OData syntax.|filter|$filter|
+|**--orderby**|string|Sorts the results using OData syntax.|orderby|$orderby|
+
+#### Command `az datashare share-subscription synchronize`
+
+##### Example
+```
+az datashare share-subscription synchronize --account-name "Account1" --resource-group "SampleResourceGroup" --name \
+"ShareSubscription1" --synchronization-mode "Incremental"
+```
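+##### Example
+A full snapshot synchronization can also be requested; `FullSync` as the other `--synchronization-mode` choice is an assumption in this sketch.
+```
+az datashare share-subscription synchronize --account-name "Account1" --resource-group "SampleResourceGroup" --name \
+"ShareSubscription1" --synchronization-mode "FullSync"
+```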
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the share subscription.|share_subscription_name|shareSubscriptionName|
+|**--synchronization-mode**|choice|Mode of synchronization used in triggers and snapshot sync. Incremental by default.|synchronization_mode|synchronizationMode|
+
+### group `az datashare synchronization-setting`
+#### Command `az datashare synchronization-setting list`
+
+##### Example
+```
+az datashare synchronization-setting list --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-name "Share1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--skip-token**|string|Continuation token|skip_token|$skipToken|
+
+#### Command `az datashare synchronization-setting show`
+
+##### Example
+```
+az datashare synchronization-setting show --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-name "Share1" --name "SynchronizationSetting1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--synchronization-setting-name**|string|The name of the synchronizationSetting.|synchronization_setting_name|synchronizationSettingName|
+
+#### Command `az datashare synchronization-setting create`
+
+##### Example
+```
+az datashare synchronization-setting create --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-name "Share1" --scheduled-synchronization-setting recurrence-interval="Day" \
+synchronization-time="2018-11-14T04:47:52.9614956Z" --name "Dataset1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share to add the synchronization setting to.|share_name|shareName|
+|**--synchronization-setting-name**|string|The name of the synchronizationSetting.|synchronization_setting_name|synchronizationSettingName|
+|**--scheduled-synchronization-setting**|object|A type of synchronization setting based on schedule|scheduled_synchronization_setting|ScheduledSynchronizationSetting|
+
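+The `--scheduled-synchronization-setting` value uses key=value shorthand for the `ScheduledSynchronizationSetting`
+object. Assuming the recurrence enum also allows `Hour`, an hourly schedule would look roughly like the following
+sketch (names reused from the samples above):
+```
+az datashare synchronization-setting create --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-name "Share1" --scheduled-synchronization-setting recurrence-interval="Hour" \
+synchronization-time="2018-11-14T04:47:52.9614956Z" --name "SynchronizationSetting1"
+```
+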
+#### Command `az datashare synchronization-setting delete`
+
+##### Example
+```
+az datashare synchronization-setting delete --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-name "Share1" --name "SynchronizationSetting1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-name**|string|The name of the share.|share_name|shareName|
+|**--synchronization-setting-name**|string|The name of the synchronizationSetting.|synchronization_setting_name|synchronizationSettingName|
+
+### group `az datashare trigger`
+#### Command `az datashare trigger list`
+
+##### Example
+```
+az datashare trigger list --account-name "Account1" --resource-group "SampleResourceGroup" --share-subscription-name \
+"ShareSubscription1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the share subscription.|share_subscription_name|shareSubscriptionName|
+|**--skip-token**|string|Continuation token|skip_token|$skipToken|
+
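+As with any `az` command, the listing can be narrowed client-side with the global `--query` (JMESPath) and `--output`
+arguments; this usage sketch is not part of the generated command surface:
+```
+az datashare trigger list --account-name "Account1" --resource-group "SampleResourceGroup" \
+--share-subscription-name "ShareSubscription1" --query "[?name=='Trigger1']" --output table
+```
+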
+#### Command `az datashare trigger show`
+
+##### Example
+```
+az datashare trigger show --account-name "Account1" --resource-group "SampleResourceGroup" --share-subscription-name \
+"ShareSubscription1" --name "Trigger1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the shareSubscription.|share_subscription_name|shareSubscriptionName|
+|**--trigger-name**|string|The name of the trigger.|trigger_name|triggerName|
+
+#### Command `az datashare trigger create`
+
+##### Example
+```
+az datashare trigger create --account-name "Account1" --resource-group "SampleResourceGroup" --share-subscription-name \
+"ShareSubscription1" --scheduled-trigger recurrence-interval="Day" synchronization-mode="Incremental" \
+synchronization-time="2018-11-14T04:47:52.9614956Z" --name "Trigger1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the share subscription which will hold the data set sink.|share_subscription_name|shareSubscriptionName|
+|**--trigger-name**|string|The name of the trigger.|trigger_name|triggerName|
+|**--scheduled-trigger**|object|A type of trigger based on schedule|scheduled_trigger|ScheduledTrigger|
+
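+The `--scheduled-trigger` shorthand mirrors the `ScheduledTrigger` object. Assuming `Hour` and `FullSync` are accepted
+for the interval and mode, an hourly full-snapshot trigger would look roughly like this sketch (the trigger name is
+hypothetical):
+```
+az datashare trigger create --account-name "Account1" --resource-group "SampleResourceGroup" --share-subscription-name \
+"ShareSubscription1" --scheduled-trigger recurrence-interval="Hour" synchronization-mode="FullSync" \
+synchronization-time="2018-11-14T04:47:52.9614956Z" --name "Trigger2"
+```
+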
+#### Command `az datashare trigger delete`
+
+##### Example
+```
+az datashare trigger delete --account-name "Account1" --resource-group "SampleResourceGroup" --share-subscription-name \
+"ShareSubscription1" --name "Trigger1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--account-name**|string|The name of the share account.|account_name|accountName|
+|**--share-subscription-name**|string|The name of the shareSubscription.|share_subscription_name|shareSubscriptionName|
+|**--trigger-name**|string|The name of the trigger.|trigger_name|triggerName|
diff --git a/src/datashare/setup.cfg b/src/datashare/setup.cfg
index e69de29bb2d..2fdd96e5d39 100644
--- a/src/datashare/setup.cfg
+++ b/src/datashare/setup.cfg
@@ -0,0 +1 @@
+#setup.cfg
\ No newline at end of file
diff --git a/src/datashare/setup.py b/src/datashare/setup.py
index 78fb8cdd85d..ea344433715 100644
--- a/src/datashare/setup.py
+++ b/src/datashare/setup.py
@@ -1,57 +1,58 @@
-#!/usr/bin/env python
-
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-
-
-from codecs import open
-from setuptools import setup, find_packages
-try:
- from azure_bdist_wheel import cmdclass
-except ImportError:
- from distutils import log as logger
- logger.warn("Wheel is not available, disabling bdist_wheel hook")
-
-# TODO: Confirm this is the right version number you want and it matches your
-# HISTORY.rst entry.
-VERSION = '0.1.1'
-
-# The full list of classifiers is available at
-# https://pypi.python.org/pypi?%3Aaction=list_classifiers
-CLASSIFIERS = [
- 'Development Status :: 4 - Beta',
- 'Intended Audience :: Developers',
- 'Intended Audience :: System Administrators',
- 'Programming Language :: Python',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.6',
- 'Programming Language :: Python :: 3.7',
- 'Programming Language :: Python :: 3.8',
- 'License :: OSI Approved :: MIT License',
-]
-
-# TODO: Add any additional SDK dependencies here
-DEPENDENCIES = []
-
-with open('README.md', 'r', encoding='utf-8') as f:
- README = f.read()
-with open('HISTORY.rst', 'r', encoding='utf-8') as f:
- HISTORY = f.read()
-
-setup(
- name='datashare',
- version=VERSION,
- description='Microsoft Azure Command-Line Tools DataShareManagementClient Extension',
- # TODO: Update author and email, if applicable
- author='Microsoft Corporation',
- author_email='azpycli@microsoft.com',
- url='https://github.com/Azure/azure-cli-extensions/tree/master/src/datashare',
- long_description=README + '\n\n' + HISTORY,
- license='MIT',
- classifiers=CLASSIFIERS,
- packages=find_packages(),
- install_requires=DEPENDENCIES,
- package_data={'azext_datashare': ['azext_metadata.json']},
-)
+#!/usr/bin/env python
+
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+
+from codecs import open
+from setuptools import setup, find_packages
+
+# Keep this version in sync with the HISTORY.rst entry.
+VERSION = '0.1.0'
+try:
+ from azext_datashare.manual.version import VERSION
+except ImportError:
+ pass
+
+# The full list of classifiers is available at
+# https://pypi.python.org/pypi?%3Aaction=list_classifiers
+CLASSIFIERS = [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: System Administrators',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'License :: OSI Approved :: MIT License',
+]
+
+DEPENDENCIES = []
+
+try:
+ from azext_datashare.manual.dependency import DEPENDENCIES
+except ImportError:
+ pass
+
+with open('README.md', 'r', encoding='utf-8') as f:
+ README = f.read()
+with open('HISTORY.rst', 'r', encoding='utf-8') as f:
+ HISTORY = f.read()
+
+setup(
+ name='datashare',
+ version=VERSION,
+ description='Microsoft Azure Command-Line Tools DataShareManagementClient Extension',
+ author='Microsoft Corporation',
+ author_email='azpycli@microsoft.com',
+ url='https://github.com/Azure/azure-cli-extensions/tree/master/src/datashare',
+ long_description=README + '\n\n' + HISTORY,
+ license='MIT',
+ classifiers=CLASSIFIERS,
+ packages=find_packages(),
+ install_requires=DEPENDENCIES,
+ package_data={'azext_datashare': ['azext_metadata.json']},
+)