diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json index f6069e8befab..e139028a7b25 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/_meta.json @@ -1,11 +1,11 @@ { - "commit": "a2da92ad78961529a087f9d0e65394174ac50794", + "commit": "75ee6087ec36c709122d00c380df49fc515000c3", "repository_url": "https://github.com/Azure/azure-rest-api-specs", "autorest": "3.9.2", "use": [ - "@autorest/python@6.2.1", + "@autorest/python@6.5.0", "@autorest/modelerfour@4.24.3" ], - "autorest_command": "autorest specification/streamanalytics/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --tag=package-2021-10-preview --use=@autorest/python@6.2.1 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False", + "autorest_command": "autorest specification/streamanalytics/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.5.0 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False", "readme": "specification/streamanalytics/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py index d9b949b69f3b..0d7576b75ab7 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/__init__.py @@ -13,7 +13,7 @@ try: from ._patch import __all__ as _patch_all - from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import + from ._patch import * # pylint: disable=unused-wildcard-import except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py index 21cda12acfd2..010852606c29 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_configuration.py @@ -29,10 +29,15 @@ class StreamAnalyticsManagementClientConfiguration(Configuration): # pylint: di :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str + :keyword api_version: Api Version. Default value is "2020-03-01". Note that overriding this + default value may result in unsupported behavior. 
+ :paramtype api_version: str """ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: super(StreamAnalyticsManagementClientConfiguration, self).__init__(**kwargs) + api_version: str = kwargs.pop("api_version", "2020-03-01") + if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: @@ -40,14 +45,12 @@ def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs self.credential = credential self.subscription_id = subscription_id + self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "mgmt-streamanalytics/{}".format(VERSION)) self._configure(**kwargs) - def _configure( - self, **kwargs # type: Any - ): - # type: (...) -> None + def _configure(self, **kwargs: Any) -> None: self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_serialization.py index 7c1dedb5133d..842ae727fbbc 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_serialization.py @@ -25,6 +25,7 @@ # -------------------------------------------------------------------------- # pylint: skip-file +# pyright: reportUnnecessaryTypeIgnoreComment=false from base64 import b64decode, b64encode import calendar @@ -37,23 +38,38 @@ import re import sys import codecs +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + TypeVar, + MutableMapping, + Type, + List, + Mapping, +) try: from urllib import quote # type: ignore except ImportError: - from urllib.parse import quote # type: ignore + from urllib.parse import quote import xml.etree.ElementTree as ET -import isodate - -from typing import Dict, Any, cast, TYPE_CHECKING +import isodate # type: ignore from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback +from azure.core.serialization import NULL as AzureCoreNull _BOM = codecs.BOM_UTF8.decode(encoding="utf-8") -if TYPE_CHECKING: - from typing import Optional, Union, AnyStr, IO, Mapping +ModelType = TypeVar("ModelType", bound="Model") +JSON = MutableMapping[str, Any] class RawDeserializer: @@ -65,8 +81,7 @@ class RawDeserializer: CONTEXT_NAME = "deserialized_data" @classmethod - def deserialize_from_text(cls, data, content_type=None): - # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: """Decode data according to content-type. Accept a stream of data as well, but will be load at once in memory for now. 
@@ -132,8 +147,7 @@ def _json_attemp(data): raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod - def deserialize_from_http_generics(cls, body_bytes, headers): - # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: """Deserialize from HTTP response. Use bytes and headers to NOT use any requests/aiohttp or whatever @@ -160,8 +174,8 @@ def deserialize_from_http_generics(cls, body_bytes, headers): basestring # type: ignore unicode_str = unicode # type: ignore except NameError: - basestring = str # type: ignore - unicode_str = str # type: ignore + basestring = str + unicode_str = str _LOGGER = logging.getLogger(__name__) @@ -188,7 +202,7 @@ def dst(self, dt): try: - from datetime import timezone as _FixedOffset + from datetime import timezone as _FixedOffset # type: ignore except ImportError: # Python 2.7 class _FixedOffset(datetime.tzinfo): # type: ignore @@ -219,7 +233,7 @@ def __getinitargs__(self): try: from datetime import timezone - TZ_UTC = timezone.utc # type: ignore + TZ_UTC = timezone.utc except ImportError: TZ_UTC = UTC() # type: ignore @@ -276,12 +290,12 @@ class Model(object): serialization and deserialization. """ - _subtype_map = {} # type: Dict[str, Dict[str, Any]] - _attribute_map = {} # type: Dict[str, Dict[str, Any]] - _validation = {} # type: Dict[str, Dict[str, Any]] + _subtype_map: Dict[str, Dict[str, Any]] = {} + _attribute_map: Dict[str, Dict[str, Any]] = {} + _validation: Dict[str, Dict[str, Any]] = {} - def __init__(self, **kwargs): - self.additional_properties = {} + def __init__(self, **kwargs: Any) -> None: + self.additional_properties: Dict[str, Any] = {} for k in kwargs: if k not in self._attribute_map: _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) @@ -290,27 +304,27 @@ def __init__(self, **kwargs): else: setattr(self, k, kwargs[k]) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: """Compare objects by comparing all attributes.""" if isinstance(other, self.__class__): return self.__dict__ == other.__dict__ return False - def __ne__(self, other): + def __ne__(self, other: Any) -> bool: """Compare objects by comparing all attributes.""" return not self.__eq__(other) - def __str__(self): + def __str__(self) -> str: return str(self.__dict__) @classmethod - def enable_additional_properties_sending(cls): + def enable_additional_properties_sending(cls) -> None: cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} @classmethod - def is_xml_model(cls): + def is_xml_model(cls) -> bool: try: - cls._xml_map + cls._xml_map # type: ignore except AttributeError: return False return True @@ -319,13 +333,13 @@ def is_xml_model(cls): def _create_xml_node(cls): """Create XML node.""" try: - xml_map = cls._xml_map + xml_map = cls._xml_map # type: ignore except AttributeError: xml_map = {} return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) - def serialize(self, keep_readonly=False, **kwargs): + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: """Return the JSON that would be sent to azure from this model. This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. 
@@ -339,8 +353,13 @@ def serialize(self, keep_readonly=False, **kwargs): serializer = Serializer(self._infer_class_models()) return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) - def as_dict(self, keep_readonly=True, key_transformer=attribute_transformer, **kwargs): - """Return a dict that can be JSONify using json.dump. + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. Advanced usage might optionally use a callback as parameter: @@ -387,7 +406,7 @@ def _infer_class_models(cls): return client_models @classmethod - def deserialize(cls, data, content_type=None): + def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: """Parse a str using the RestAPI syntax and return a model. :param str data: A str using RestAPI structure. JSON by default. @@ -399,7 +418,12 @@ def deserialize(cls, data, content_type=None): return deserializer(cls.__name__, data, content_type=content_type) @classmethod - def from_dict(cls, data, key_extractors=None, content_type=None): + def from_dict( + cls: Type[ModelType], + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> ModelType: """Parse a dict using given key extractor return a model. By default consider key @@ -412,8 +436,8 @@ def from_dict(cls, data, key_extractors=None, content_type=None): :raises: DeserializationError if something went wrong """ deserializer = Deserializer(cls._infer_class_models()) - deserializer.key_extractors = ( - [ + deserializer.key_extractors = ( # type: ignore + [ # type: ignore attribute_key_case_insensitive_extractor, rest_key_case_insensitive_extractor, last_rest_key_case_insensitive_extractor, @@ -453,7 +477,7 @@ def _classify(cls, response, objects): return cls flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) try: - return objects[flatten_mapping_type[subtype_value]] + return objects[flatten_mapping_type[subtype_value]] # type: ignore except KeyError: _LOGGER.warning( "Subtype value %s has no mapping, use base class %s.", @@ -521,7 +545,7 @@ class Serializer(object): "multiple": lambda x, y: x % y != 0, } - def __init__(self, classes=None): + def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): self.serialize_type = { "iso-8601": Serializer.serialize_iso, "rfc-1123": Serializer.serialize_rfc, @@ -537,7 +561,7 @@ def __init__(self, classes=None): "[]": self.serialize_iter, "{}": self.serialize_dict, } - self.dependencies = dict(classes) if classes else {} + self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {} self.key_transformer = full_restapi_key_transformer self.client_side_validation = True @@ -605,14 +629,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs): if xml_desc.get("attr", False): if xml_ns: ET.register_namespace(xml_prefix, xml_ns) - xml_name = "{}{}".format(xml_ns, xml_name) - serialized.set(xml_name, new_attr) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore continue if xml_desc.get("text", False): - serialized.text = new_attr + serialized.text = new_attr # type: ignore continue if isinstance(new_attr, list): - serialized.extend(new_attr) + serialized.extend(new_attr) # type: ignore elif isinstance(new_attr, ET.Element): # If the down XML has no XML/Name, we MUST 
replace the tag with the local tag. But keeping the namespaces. if "name" not in getattr(orig_attr, "_xml_map", {}): @@ -621,23 +645,22 @@ def _serialize(self, target_obj, data_type=None, **kwargs): new_attr.tag = "}".join([splitted_tag[0], xml_name]) else: new_attr.tag = xml_name - serialized.append(new_attr) + serialized.append(new_attr) # type: ignore else: # That's a basic type # Integrate namespace if necessary local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) local_node.text = unicode_str(new_attr) - serialized.append(local_node) + serialized.append(local_node) # type: ignore else: # JSON - for k in reversed(keys): - unflattened = {k: new_attr} - new_attr = unflattened + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} _new_attr = new_attr _serialized = serialized - for k in keys: + for k in keys: # type: ignore if k not in _serialized: - _serialized.update(_new_attr) - _new_attr = _new_attr[k] + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore _serialized = _serialized[k] except ValueError: continue @@ -659,8 +682,8 @@ def body(self, data, data_type, **kwargs): """ # Just in case this is a dict - internal_data_type = data_type.strip("[]{}") - internal_data_type = self.dependencies.get(internal_data_type, None) + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) try: is_xml_model_serialization = kwargs["is_xml"] except KeyError: @@ -675,7 +698,7 @@ def body(self, data, data_type, **kwargs): # We're not able to deal with additional properties for now. deserializer.additional_properties_detection = False if is_xml_model_serialization: - deserializer.key_extractors = [ + deserializer.key_extractors = [ # type: ignore attribute_key_case_insensitive_extractor, ] else: @@ -780,6 +803,8 @@ def serialize_data(self, data, data_type, **kwargs): raise ValueError("No value for given attribute") try: + if data is AzureCoreNull: + return None if data_type in self.basic_types.values(): return self.serialize_basic(data, data_type, **kwargs) @@ -843,7 +868,7 @@ def serialize_unicode(cls, data): pass try: - if isinstance(data, unicode): + if isinstance(data, unicode): # type: ignore # Don't change it, JSON and XML ElementTree are totally able # to serialize correctly u'' strings return data @@ -1001,10 +1026,10 @@ def serialize_enum(attr, enum_obj=None): except AttributeError: result = attr try: - enum_obj(result) + enum_obj(result) # type: ignore return result except ValueError: - for enum_value in enum_obj: + for enum_value in enum_obj: # type: ignore if enum_value.value.lower() == str(attr).lower(): return enum_value.value error = "{!r} is not valid value for enum {!r}" @@ -1164,7 +1189,8 @@ def rest_key_extractor(attr, attr_desc, data): working_data = data while "." 
in key: - dict_keys = _FLATTEN.split(key) + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) break @@ -1245,7 +1271,7 @@ def _extract_name_from_internal_type(internal_type): xml_name = internal_type_xml_map.get("name", internal_type.__name__) xml_ns = internal_type_xml_map.get("ns", None) if xml_ns: - xml_name = "{}{}".format(xml_ns, xml_name) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) return xml_name @@ -1269,7 +1295,7 @@ def xml_key_extractor(attr, attr_desc, data): # Integrate namespace if necessary xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) if xml_ns: - xml_name = "{}{}".format(xml_ns, xml_name) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) # If it's an attribute, that's simple if xml_desc.get("attr", False): @@ -1335,7 +1361,7 @@ class Deserializer(object): valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - def __init__(self, classes=None): + def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): self.deserialize_type = { "iso-8601": Deserializer.deserialize_iso, "rfc-1123": Deserializer.deserialize_rfc, @@ -1355,7 +1381,7 @@ def __init__(self, classes=None): "duration": (isodate.Duration, datetime.timedelta), "iso-8601": (datetime.datetime), } - self.dependencies = dict(classes) if classes else {} + self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {} self.key_extractors = [rest_key_extractor, xml_key_extractor] # Additional properties only works if the "rest_key_extractor" is used to # extract the keys. Making it to work whatever the key extractor is too much @@ -1416,7 +1442,7 @@ def _deserialize(self, target_obj, data): if data is None: return data try: - attributes = response._attribute_map + attributes = response._attribute_map # type: ignore d_attrs = {} for attr, attr_desc in attributes.items(): # Check empty string. If it's not empty, someone has a real "additionalProperties"... @@ -1444,7 +1470,7 @@ def _deserialize(self, target_obj, data): value = self.deserialize_data(raw_value, attr_desc["type"]) d_attrs[attr] = value except (AttributeError, TypeError, KeyError) as err: - msg = "Unable to deserialize to object: " + class_name + msg = "Unable to deserialize to object: " + class_name # type: ignore raise_with_traceback(DeserializationError, msg, err) else: additional_properties = self._build_additional_properties(attributes, data) @@ -1474,7 +1500,7 @@ def _classify_target(self, target, data): Once classification has been determined, initialize object. :param str target: The target object type to deserialize to. - :param str/dict data: The response data to deseralize. + :param str/dict data: The response data to deserialize. """ if target is None: return None, None @@ -1489,7 +1515,7 @@ def _classify_target(self, target, data): target = target._classify(data, self.dependencies) except AttributeError: pass # Target is not a Model, no classify - return target, target.__class__.__name__ + return target, target.__class__.__name__ # type: ignore def failsafe_deserialize(self, target_obj, data, content_type=None): """Ignores any errors encountered in deserialization, @@ -1499,7 +1525,7 @@ def failsafe_deserialize(self, target_obj, data, content_type=None): a deserialization error. :param str target_obj: The target object type to deserialize to. - :param str/dict data: The response data to deseralize. 
+ :param str/dict data: The response data to deserialize. :param str content_type: Swagger "produces" if available. """ try: @@ -1543,7 +1569,7 @@ def _unpack_content(raw_data, content_type=None): return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): - return RawDeserializer.deserialize_from_text(raw_data, content_type) + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore return raw_data def _instantiate_model(self, response, attrs, additional_properties=None): @@ -1565,7 +1591,7 @@ def _instantiate_model(self, response, attrs, additional_properties=None): response_obj.additional_properties = additional_properties return response_obj except TypeError as err: - msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore raise DeserializationError(msg + str(err)) else: try: @@ -1747,7 +1773,7 @@ def deserialize_unicode(data): # Consider this is real string try: - if isinstance(data, unicode): + if isinstance(data, unicode): # type: ignore return data except NameError: return str(data) @@ -1798,7 +1824,7 @@ def deserialize_bytearray(attr): """ if isinstance(attr, ET.Element): attr = attr.text - return bytearray(b64decode(attr)) + return bytearray(b64decode(attr)) # type: ignore @staticmethod def deserialize_base64(attr): @@ -1810,8 +1836,8 @@ def deserialize_base64(attr): """ if isinstance(attr, ET.Element): attr = attr.text - padding = "=" * (3 - (len(attr) + 3) % 4) - attr = attr + padding + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore encoded = attr.replace("-", "+").replace("_", "/") return b64decode(encoded) @@ -1826,7 +1852,7 @@ def deserialize_decimal(attr): if isinstance(attr, ET.Element): attr = attr.text try: - return decimal.Decimal(attr) + return decimal.Decimal(attr) # type: ignore except decimal.DecimalException as err: msg = "Invalid decimal {}".format(attr) raise_with_traceback(DeserializationError, msg, err) @@ -1841,7 +1867,7 @@ def deserialize_long(attr): """ if isinstance(attr, ET.Element): attr = attr.text - return _long_type(attr) + return _long_type(attr) # type: ignore @staticmethod def deserialize_duration(attr): @@ -1871,7 +1897,7 @@ def deserialize_date(attr): """ if isinstance(attr, ET.Element): attr = attr.text - if re.search(r"[^\W\d_]", attr, re.I + re.U): + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore raise DeserializationError("Date must have only digits and -. Received: %s" % attr) # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. return isodate.parse_date(attr, defaultmonth=None, defaultday=None) @@ -1886,7 +1912,7 @@ def deserialize_time(attr): """ if isinstance(attr, ET.Element): attr = attr.text - if re.search(r"[^\W\d_]", attr, re.I + re.U): + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore raise DeserializationError("Date must have only digits and -. 
Received: %s" % attr) return isodate.parse_time(attr) @@ -1901,7 +1927,7 @@ def deserialize_rfc(attr): if isinstance(attr, ET.Element): attr = attr.text try: - parsed_date = email.utils.parsedate_tz(attr) + parsed_date = email.utils.parsedate_tz(attr) # type: ignore date_obj = datetime.datetime( *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) ) @@ -1924,7 +1950,7 @@ def deserialize_iso(attr): if isinstance(attr, ET.Element): attr = attr.text try: - attr = attr.upper() + attr = attr.upper() # type: ignore match = Deserializer.valid_date.match(attr) if not match: raise ValueError("Invalid datetime string: " + attr) @@ -1960,7 +1986,7 @@ def deserialize_unix(attr): :raises: DeserializationError if format invalid """ if isinstance(attr, ET.Element): - attr = int(attr.text) + attr = int(attr.text) # type: ignore try: date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) except ValueError as err: diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py index d66eed577659..de70e76a2339 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_stream_analytics_management_client.py @@ -12,7 +12,7 @@ from azure.core.rest import HttpRequest, HttpResponse from azure.mgmt.core import ARMPipelineClient -from . import models +from . import models as _models from ._configuration import StreamAnalyticsManagementClientConfiguration from ._serialization import Deserializer, Serializer from .operations import ( @@ -22,7 +22,6 @@ Operations, OutputsOperations, PrivateEndpointsOperations, - SkuOperations, StreamingJobsOperations, SubscriptionsOperations, TransformationsOperations, @@ -36,22 +35,20 @@ class StreamAnalyticsManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """Stream Analytics Client. 
- :ivar functions: FunctionsOperations operations - :vartype functions: azure.mgmt.streamanalytics.operations.FunctionsOperations - :ivar inputs: InputsOperations operations - :vartype inputs: azure.mgmt.streamanalytics.operations.InputsOperations - :ivar outputs: OutputsOperations operations - :vartype outputs: azure.mgmt.streamanalytics.operations.OutputsOperations :ivar operations: Operations operations :vartype operations: azure.mgmt.streamanalytics.operations.Operations :ivar streaming_jobs: StreamingJobsOperations operations :vartype streaming_jobs: azure.mgmt.streamanalytics.operations.StreamingJobsOperations - :ivar sku: SkuOperations operations - :vartype sku: azure.mgmt.streamanalytics.operations.SkuOperations - :ivar subscriptions: SubscriptionsOperations operations - :vartype subscriptions: azure.mgmt.streamanalytics.operations.SubscriptionsOperations + :ivar inputs: InputsOperations operations + :vartype inputs: azure.mgmt.streamanalytics.operations.InputsOperations + :ivar outputs: OutputsOperations operations + :vartype outputs: azure.mgmt.streamanalytics.operations.OutputsOperations :ivar transformations: TransformationsOperations operations :vartype transformations: azure.mgmt.streamanalytics.operations.TransformationsOperations + :ivar functions: FunctionsOperations operations + :vartype functions: azure.mgmt.streamanalytics.operations.FunctionsOperations + :ivar subscriptions: SubscriptionsOperations operations + :vartype subscriptions: azure.mgmt.streamanalytics.operations.SubscriptionsOperations :ivar clusters: ClustersOperations operations :vartype clusters: azure.mgmt.streamanalytics.operations.ClustersOperations :ivar private_endpoints: PrivateEndpointsOperations operations @@ -62,6 +59,9 @@ class StreamAnalyticsManagementClient: # pylint: disable=client-accepts-api-ver :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str + :keyword api_version: Api Version. Default value is "2020-03-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
""" @@ -76,20 +76,19 @@ def __init__( self._config = StreamAnalyticsManagementClientConfiguration( credential=credential, subscription_id=subscription_id, **kwargs ) - self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False - self.functions = FunctionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.inputs = InputsOperations(self._client, self._config, self._serialize, self._deserialize) - self.outputs = OutputsOperations(self._client, self._config, self._serialize, self._deserialize) self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.streaming_jobs = StreamingJobsOperations(self._client, self._config, self._serialize, self._deserialize) - self.sku = SkuOperations(self._client, self._config, self._serialize, self._deserialize) - self.subscriptions = SubscriptionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.inputs = InputsOperations(self._client, self._config, self._serialize, self._deserialize) + self.outputs = OutputsOperations(self._client, self._config, self._serialize, self._deserialize) self.transformations = TransformationsOperations(self._client, self._config, self._serialize, self._deserialize) + self.functions = FunctionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.subscriptions = SubscriptionsOperations(self._client, self._config, self._serialize, self._deserialize) self.clusters = ClustersOperations(self._client, self._config, self._serialize, self._deserialize) self.private_endpoints = PrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize @@ -117,15 +116,12 @@ def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: request_copy.url = self._client.format_url(request_copy.url) return self._client.send_request(request_copy, **kwargs) - def close(self): - # type: () -> None + def close(self) -> None: self._client.close() - def __enter__(self): - # type: () -> StreamAnalyticsManagementClient + def __enter__(self) -> "StreamAnalyticsManagementClient": self._client.__enter__() return self - def __exit__(self, *exc_details): - # type: (Any) -> None + def __exit__(self, *exc_details: Any) -> None: self._client.__exit__(*exc_details) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_vendor.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_vendor.py index 9aad73fc743e..bd0df84f5319 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_vendor.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_vendor.py @@ -5,6 +5,8 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from typing import List, cast + from azure.core.pipeline.transport import HttpRequest @@ -22,6 +24,7 @@ def _format_url_section(template, **kwargs): try: return template.format(**kwargs) except KeyError as key: - formatted_components = template.split("/") + # Need the cast, as for some reasons "split" is typed as list[str | Any] + formatted_components = cast(List[str], template.split("/")) components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] template = "/".join(components) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py index e32dc6ec4218..e5754a47ce68 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "2.0.0b1" +VERSION = "1.0.0b1" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py index 9ac09f2cb5a8..1fcc98ef2e27 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/__init__.py @@ -10,7 +10,7 @@ try: from ._patch import __all__ as _patch_all - from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import + from ._patch import * # pylint: disable=unused-wildcard-import except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py index 53f46a4c1964..b9f7eaf7f3b4 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_configuration.py @@ -29,10 +29,15 @@ class StreamAnalyticsManagementClientConfiguration(Configuration): # pylint: di :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str + :keyword api_version: Api Version. Default value is "2020-03-01". Note that overriding this + default value may result in unsupported behavior. 
+ :paramtype api_version: str """ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: super(StreamAnalyticsManagementClientConfiguration, self).__init__(**kwargs) + api_version: str = kwargs.pop("api_version", "2020-03-01") + if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: @@ -40,6 +45,7 @@ def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **k self.credential = credential self.subscription_id = subscription_id + self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) kwargs.setdefault("sdk_moniker", "mgmt-streamanalytics/{}".format(VERSION)) self._configure(**kwargs) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py index 6e6361e84600..ec4c953ffb87 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/_stream_analytics_management_client.py @@ -12,7 +12,7 @@ from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient -from .. import models +from .. import models as _models from .._serialization import Deserializer, Serializer from ._configuration import StreamAnalyticsManagementClientConfiguration from .operations import ( @@ -22,7 +22,6 @@ Operations, OutputsOperations, PrivateEndpointsOperations, - SkuOperations, StreamingJobsOperations, SubscriptionsOperations, TransformationsOperations, @@ -36,22 +35,20 @@ class StreamAnalyticsManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """Stream Analytics Client. 
- :ivar functions: FunctionsOperations operations - :vartype functions: azure.mgmt.streamanalytics.aio.operations.FunctionsOperations - :ivar inputs: InputsOperations operations - :vartype inputs: azure.mgmt.streamanalytics.aio.operations.InputsOperations - :ivar outputs: OutputsOperations operations - :vartype outputs: azure.mgmt.streamanalytics.aio.operations.OutputsOperations :ivar operations: Operations operations :vartype operations: azure.mgmt.streamanalytics.aio.operations.Operations :ivar streaming_jobs: StreamingJobsOperations operations :vartype streaming_jobs: azure.mgmt.streamanalytics.aio.operations.StreamingJobsOperations - :ivar sku: SkuOperations operations - :vartype sku: azure.mgmt.streamanalytics.aio.operations.SkuOperations - :ivar subscriptions: SubscriptionsOperations operations - :vartype subscriptions: azure.mgmt.streamanalytics.aio.operations.SubscriptionsOperations + :ivar inputs: InputsOperations operations + :vartype inputs: azure.mgmt.streamanalytics.aio.operations.InputsOperations + :ivar outputs: OutputsOperations operations + :vartype outputs: azure.mgmt.streamanalytics.aio.operations.OutputsOperations :ivar transformations: TransformationsOperations operations :vartype transformations: azure.mgmt.streamanalytics.aio.operations.TransformationsOperations + :ivar functions: FunctionsOperations operations + :vartype functions: azure.mgmt.streamanalytics.aio.operations.FunctionsOperations + :ivar subscriptions: SubscriptionsOperations operations + :vartype subscriptions: azure.mgmt.streamanalytics.aio.operations.SubscriptionsOperations :ivar clusters: ClustersOperations operations :vartype clusters: azure.mgmt.streamanalytics.aio.operations.ClustersOperations :ivar private_endpoints: PrivateEndpointsOperations operations @@ -63,6 +60,9 @@ class StreamAnalyticsManagementClient: # pylint: disable=client-accepts-api-ver :type subscription_id: str :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str + :keyword api_version: Api Version. Default value is "2020-03-01". Note that overriding this + default value may result in unsupported behavior. + :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
""" @@ -77,20 +77,19 @@ def __init__( self._config = StreamAnalyticsManagementClientConfiguration( credential=credential, subscription_id=subscription_id, **kwargs ) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False - self.functions = FunctionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.inputs = InputsOperations(self._client, self._config, self._serialize, self._deserialize) - self.outputs = OutputsOperations(self._client, self._config, self._serialize, self._deserialize) self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.streaming_jobs = StreamingJobsOperations(self._client, self._config, self._serialize, self._deserialize) - self.sku = SkuOperations(self._client, self._config, self._serialize, self._deserialize) - self.subscriptions = SubscriptionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.inputs = InputsOperations(self._client, self._config, self._serialize, self._deserialize) + self.outputs = OutputsOperations(self._client, self._config, self._serialize, self._deserialize) self.transformations = TransformationsOperations(self._client, self._config, self._serialize, self._deserialize) + self.functions = FunctionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.subscriptions = SubscriptionsOperations(self._client, self._config, self._serialize, self._deserialize) self.clusters = ClustersOperations(self._client, self._config, self._serialize, self._deserialize) self.private_endpoints = PrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize @@ -125,5 +124,5 @@ async def __aenter__(self) -> "StreamAnalyticsManagementClient": await self._client.__aenter__() return self - async def __aexit__(self, *exc_details) -> None: + async def __aexit__(self, *exc_details: Any) -> None: await self._client.__aexit__(*exc_details) diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py index 0edde7ab94af..59ddeae83d21 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/__init__.py @@ -6,30 +6,28 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._functions_operations import FunctionsOperations -from ._inputs_operations import InputsOperations -from ._outputs_operations import OutputsOperations from ._operations import Operations from ._streaming_jobs_operations import StreamingJobsOperations -from ._sku_operations import SkuOperations -from ._subscriptions_operations import SubscriptionsOperations +from ._inputs_operations import InputsOperations +from ._outputs_operations import OutputsOperations from ._transformations_operations import TransformationsOperations +from ._functions_operations import FunctionsOperations +from ._subscriptions_operations import SubscriptionsOperations from ._clusters_operations import ClustersOperations from ._private_endpoints_operations import PrivateEndpointsOperations from ._patch import __all__ as _patch_all -from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import * # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk __all__ = [ - "FunctionsOperations", - "InputsOperations", - "OutputsOperations", "Operations", "StreamingJobsOperations", - "SkuOperations", - "SubscriptionsOperations", + "InputsOperations", + "OutputsOperations", "TransformationsOperations", + "FunctionsOperations", + "SubscriptionsOperations", "ClustersOperations", "PrivateEndpointsOperations", ] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py index ff04096b8e82..b9178d2346e6 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_clusters_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -40,10 +41,6 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -87,16 +84,14 @@ async def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Cluster] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(cluster, (IO, bytes)): + if isinstance(cluster, (IOBase, bytes)): _content = cluster else: _json = self._serialize.body(cluster, "Cluster") @@ -116,10 +111,11 @@ async def _create_or_update_initial( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -136,11 +132,13 @@ async def _create_or_update_initial( deserialized = self._deserialize("Cluster", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } @overload async def begin_create_or_update( @@ -254,7 +252,7 @@ async def begin_create_or_update( :param cluster_name: The name of the cluster. Required. :type cluster_name: str :param cluster: The definition of the cluster that will be used to create a new cluster or - replace the existing one. Is either a model type or a IO type. Required. + replace the existing one. Is either a Cluster type or a IO type. Required. :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. 
Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -283,16 +281,14 @@ async def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Cluster] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._create_or_update_initial( # type: ignore + raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, cluster=cluster, @@ -314,7 +310,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -326,9 +322,11 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } async def _update_initial( self, @@ -349,16 +347,14 @@ async def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.Cluster]] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.Cluster]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(cluster, (IO, bytes)): + if isinstance(cluster, (IOBase, bytes)): _content = cluster else: _json = self._serialize.body(cluster, 
"Cluster") @@ -377,10 +373,11 @@ async def _update_initial( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -399,7 +396,9 @@ async def _update_initial( return deserialized - _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } @overload async def begin_update( @@ -505,7 +504,7 @@ async def begin_update( :param cluster_name: The name of the cluster. Required. :type cluster_name: str :param cluster: The properties specified here will overwrite the corresponding properties in - the existing cluster (ie. Those properties will be updated). Is either a model type or a IO + the existing cluster (ie. Those properties will be updated). Is either a Cluster type or a IO type. Required. :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO :param if_match: The ETag of the resource. Omit this value to always overwrite the current @@ -531,16 +530,14 @@ async def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Cluster] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._update_initial( # type: ignore + raw_result = await self._update_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, cluster=cluster, @@ -561,7 +558,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -573,9 +570,11 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, 
polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } @distributed_trace_async async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _models.Cluster: @@ -602,10 +601,8 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Cluster] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -617,10 +614,11 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -637,7 +635,9 @@ async def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, cluster_name: str, **kwargs: Any @@ -653,10 +653,8 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -668,10 +666,11 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -684,7 +683,9 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } @distributed_trace_async async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> AsyncLROPoller[None]: @@ -710,13 +711,11 @@ async def begin_delete(self, resource_group_name: str, cluster_name: str, **kwar _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, @@ -734,7 +733,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -746,9 +745,11 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } @distributed_trace def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Cluster"]: @@ -762,10 +763,8 @@ def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Cluster" _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ClusterListResult] + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -786,12 +785,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -799,14 +809,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -819,7 +830,9 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters"} # type: ignore + list_by_subscription.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters" + } @distributed_trace def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Cluster"]: @@ -836,10 +849,8 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ClusterListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -861,12 +872,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -874,14 +896,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -894,7 +917,9 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters"} # type: ignore + list_by_resource_group.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters" + } @distributed_trace def list_streaming_jobs( @@ -915,10 +940,8 @@ def list_streaming_jobs( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ClusterJobListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ClusterJobListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -941,12 +964,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -954,14 +988,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ClusterJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) 
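A note on the next-link branch added to every paged operation above: instead of issuing a bare GET on the service-returned continuation URL, the generated code now re-parses that URL and forces the client's api-version back onto the query string, because the service can return next links that omit it. The following standalone sketch shows the same technique using only the standard library; the helper name follow_next_link is illustrative rather than part of the SDK, and the generated code additionally wraps the parameters in case_insensitive_dict.

import urllib.parse

def follow_next_link(next_link, api_version):
    # Split the continuation URL returned by the service.
    parsed = urllib.parse.urlparse(next_link)
    # parse_qs yields {name: [values]}; quote each value as the generated code does.
    params = {
        key: [urllib.parse.quote(v) for v in value]
        for key, value in urllib.parse.parse_qs(parsed.query).items()
    }
    # Overwrite (or add) api-version so every page is requested with the client's version.
    params["api-version"] = api_version
    # Rejoin the bare path with the base URL; the query now travels separately in params.
    return urllib.parse.urljoin(next_link, parsed.path), params

url, params = follow_next_link(
    "https://management.azure.com/subscriptions/000/providers/Microsoft.StreamAnalytics/clusters?$skiptoken=abc",
    "2020-03-01",
)
# url    -> "https://management.azure.com/subscriptions/000/providers/Microsoft.StreamAnalytics/clusters"
# params -> {"$skiptoken": ["abc"], "api-version": "2020-03-01"}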
async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -974,4 +1009,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_streaming_jobs.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs"} # type: ignore + list_streaming_jobs.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py index bb2838231b84..98c09c721c8b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_functions_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -40,10 +41,6 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -172,7 +169,7 @@ async def create_or_replace( :param function_name: The name of the function. Required. :type function_name: str :param function: The definition of the function that will be used to create a new function or - replace the existing one under the streaming job. Is either a model type or a IO type. + replace the existing one under the streaming job. Is either a Function type or an IO type. Required. :type function: ~azure.mgmt.streamanalytics.models.Function or IO :param if_match: The ETag of the function. 
Omit this value to always overwrite the current @@ -202,16 +199,14 @@ async def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Function] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Function] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(function, (IO, bytes)): + if isinstance(function, (IOBase, bytes)): _content = function else: _json = self._serialize.body(function, "Function") @@ -232,10 +227,11 @@ async def create_or_replace( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -257,11 +253,13 @@ async def create_or_replace( deserialized = self._deserialize("Function", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - create_or_replace.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}"} # type: ignore + create_or_replace.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" + } @overload async def update( @@ -370,7 +368,7 @@ async def update( corresponding properties in the existing function (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing function will remain the same and not change as a result of this PATCH operation. Is either a - model type or a IO type. Required. + Function type or an IO type. Required. :type function: ~azure.mgmt.streamanalytics.models.Function or IO :param if_match: The ETag of the function. 
Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -395,16 +393,14 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Function] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Function] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(function, (IO, bytes)): + if isinstance(function, (IOBase, bytes)): _content = function else: _json = self._serialize.body(function, "Function") @@ -424,10 +420,11 @@ async def update( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -447,7 +444,9 @@ async def update( return deserialized - update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -478,10 +477,8 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -494,10 +491,11 @@ async def delete( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -510,7 +508,9 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}"} # type: ignore + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" + } @distributed_trace_async async def get(self, resource_group_name: str, job_name: str, function_name: str, **kwargs: Any) -> _models.Function: @@ -539,10 +539,8 @@ async def get(self, resource_group_name: str, job_name: str, function_name: str, _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Function] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Function] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -555,10 +553,11 @@ async def get(self, resource_group_name: str, job_name: str, function_name: str, params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -578,7 +577,9 @@ async def get(self, resource_group_name: str, job_name: str, function_name: str, return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" + } @distributed_trace def list_by_streaming_job( @@ -604,10 +605,8 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.FunctionListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FunctionListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -631,12 +630,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = 
HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -644,14 +654,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("FunctionListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -664,7 +675,9 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions"} # type: ignore + list_by_streaming_job.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions" + } async def _test_initial( self, @@ -685,16 +698,14 @@ async def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ResourceTestStatus]] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(function, (IO, bytes)): + if isinstance(function, (IOBase, bytes)): _content = function else: if function is not None: @@ -716,10 +727,11 @@ async def _test_initial( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -738,7 +750,9 @@ async def _test_initial( return deserialized - _test_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test"} # type: ignore + _test_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test" + } @overload async def begin_test( @@ -858,8 +872,8 @@ async def begin_test( the full function definition intended to be tested. If the function specified already exists, this parameter can be left null to test the existing function as is or if specified, the properties specified will overwrite the corresponding properties in the existing function - (exactly like a PATCH operation) and the resulting function will be tested. Is either a model - type or a IO type. Default value is None. + (exactly like a PATCH operation) and the resulting function will be tested. Is either a + Function type or a IO type. Default value is None. :type function: ~azure.mgmt.streamanalytics.models.Function or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. Default value is None. @@ -881,16 +895,14 @@ async def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceTestStatus] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._test_initial( # type: ignore + raw_result = await self._test_initial( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, @@ -911,7 +923,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -923,9 +935,11 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_test.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test"} # type: ignore + begin_test.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test" + } @overload async def retrieve_default_definition( @@ -1015,8 +1029,8 @@ async def 
retrieve_default_definition( :param function_name: The name of the function. Required. :type function_name: str :param function_retrieve_default_definition_parameters: Parameters used to specify the type of - function to retrieve the default definition for. Is either a model type or a IO type. Default - value is None. + function to retrieve the default definition for. Is either a + FunctionRetrieveDefaultDefinitionParameters type or an IO type. Default value is None. :type function_retrieve_default_definition_parameters: ~azure.mgmt.streamanalytics.models.FunctionRetrieveDefaultDefinitionParameters or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. @@ -1038,16 +1052,14 @@ async def retrieve_default_definition( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Function] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Function] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(function_retrieve_default_definition_parameters, (IO, bytes)): + if isinstance(function_retrieve_default_definition_parameters, (IOBase, bytes)): _content = function_retrieve_default_definition_parameters else: if function_retrieve_default_definition_parameters is not None: @@ -1071,10 +1083,11 @@ async def retrieve_default_definition( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1091,4 +1104,6 @@ async def retrieve_default_definition( return deserialized - retrieve_default_definition.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition"} # type: ignore + retrieve_default_definition.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py index 52df56456794..bdd5ac35c08c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_inputs_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code 
Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -39,10 +40,6 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -171,7 +168,7 @@ async def create_or_replace( :param input_name: The name of the input. Required. :type input_name: str :param input: The definition of the input that will be used to create a new input or replace - the existing one under the streaming job. Is either a model type or a IO type. Required. + the existing one under the streaming job. Is either an Input type or an IO type. Required. :type input: ~azure.mgmt.streamanalytics.models.Input or IO :param if_match: The ETag of the input. Omit this value to always overwrite the current input. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. @@ -200,16 +197,14 @@ async def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Input] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Input] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(input, (IO, bytes)): + if isinstance(input, (IOBase, bytes)): _content = input else: _json = self._serialize.body(input, "Input") @@ -230,10 +225,11 @@ async def create_or_replace( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -255,11 +251,13 @@ async def create_or_replace( deserialized = self._deserialize("Input", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - create_or_replace.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}"} # type: ignore + create_or_replace.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" + } @overload async def update( @@ -367,7 +365,7 @@ async def update( :param input: An Input object. The properties specified here will overwrite the corresponding properties in the existing input (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing input will - remain the same and not change as a result of this PATCH operation. Is either a model type or a + remain the same and not change as a result of this PATCH operation. Is either a Input type or a IO type. Required. :type input: ~azure.mgmt.streamanalytics.models.Input or IO :param if_match: The ETag of the input. Omit this value to always overwrite the current input. @@ -393,16 +391,14 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Input] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Input] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(input, (IO, bytes)): + if isinstance(input, (IOBase, bytes)): _content = input else: _json = self._serialize.body(input, "Input") @@ -422,10 +418,11 @@ async def update( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -445,7 +442,9 @@ async def update( return deserialized - update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -476,10 +475,8 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -492,10 +489,11 @@ async def delete( # 
pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -508,7 +506,9 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}"} # type: ignore + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" + } @distributed_trace_async async def get(self, resource_group_name: str, job_name: str, input_name: str, **kwargs: Any) -> _models.Input: @@ -537,10 +537,8 @@ async def get(self, resource_group_name: str, job_name: str, input_name: str, ** _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Input] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Input] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -553,10 +551,11 @@ async def get(self, resource_group_name: str, job_name: str, input_name: str, ** params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -576,7 +575,9 @@ async def get(self, resource_group_name: str, job_name: str, input_name: str, ** return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" + } @distributed_trace def list_by_streaming_job( @@ -602,10 +603,8 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.InputListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InputListResult] = kwargs.pop("cls", None) error_map 
= { 401: ClientAuthenticationError, @@ -629,12 +628,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -642,14 +652,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("InputListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -662,7 +673,9 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs"} # type: ignore + list_by_streaming_job.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs" + } async def _test_initial( self, @@ -683,16 +696,14 @@ async def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ResourceTestStatus]] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(input, (IO, bytes)): + if isinstance(input, (IOBase, bytes)): _content = input else: if input is not None: @@ -714,10 +725,11 @@ async def _test_initial( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: 
disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -736,7 +748,9 @@ async def _test_initial( return deserialized - _test_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test"} # type: ignore + _test_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test" + } @overload async def begin_test( @@ -851,7 +865,7 @@ async def begin_test( full input definition intended to be tested. If the input specified already exists, this parameter can be left null to test the existing input as is or if specified, the properties specified will overwrite the corresponding properties in the existing input (exactly like a - PATCH operation) and the resulting input will be tested. Is either a model type or a IO type. + PATCH operation) and the resulting input will be tested. Is either an Input type or an IO type. Default value is None. :type input: ~azure.mgmt.streamanalytics.models.Input or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. @@ -874,16 +888,14 @@ async def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceTestStatus] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._test_initial( # type: ignore + raw_result = await self._test_initial( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, @@ -904,7 +916,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -916,6 +928,8 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_test.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test"} # type: ignore + begin_test.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py index 82cc4b14c77b..7bc764b047b8 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_operations.py @@ -6,8 +6,8 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -29,10 +29,6 @@ from ..._vendor import _convert_request from ...operations._operations import build_list_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -68,10 +64,8 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -91,12 +85,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -104,14 +109,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("OperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = 
cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -124,4 +130,4 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = {"url": "/providers/Microsoft.StreamAnalytics/operations"} # type: ignore + list.metadata = {"url": "/providers/Microsoft.StreamAnalytics/operations"} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py index a4bd1b3f9b25..e3eb1b0b197f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_outputs_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -39,10 +40,6 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -171,7 +168,7 @@ async def create_or_replace( :param output_name: The name of the output. Required. :type output_name: str :param output: The definition of the output that will be used to create a new output or replace - the existing one under the streaming job. Is either a model type or a IO type. Required. + the existing one under the streaming job. Is either an Output type or an IO type. Required. :type output: ~azure.mgmt.streamanalytics.models.Output or IO :param if_match: The ETag of the output. Omit this value to always overwrite the current output. 
Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -200,16 +197,14 @@ async def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Output] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Output] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(output, (IO, bytes)): + if isinstance(output, (IOBase, bytes)): _content = output else: _json = self._serialize.body(output, "Output") @@ -230,10 +225,11 @@ async def create_or_replace( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -255,11 +251,13 @@ async def create_or_replace( deserialized = self._deserialize("Output", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - create_or_replace.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}"} # type: ignore + create_or_replace.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" + } @overload async def update( @@ -367,8 +365,8 @@ async def update( :param output: An Output object. The properties specified here will overwrite the corresponding properties in the existing output (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing output will - remain the same and not change as a result of this PATCH operation. Is either a model type or a - IO type. Required. + remain the same and not change as a result of this PATCH operation. Is either an Output type or + an IO type. Required. :type output: ~azure.mgmt.streamanalytics.models.Output or IO :param if_match: The ETag of the output. 
Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -393,16 +391,14 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Output] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Output] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(output, (IO, bytes)): + if isinstance(output, (IOBase, bytes)): _content = output else: _json = self._serialize.body(output, "Output") @@ -422,10 +418,11 @@ async def update( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -445,7 +442,9 @@ async def update( return deserialized - update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements @@ -476,10 +475,8 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -492,10 +489,11 @@ async def delete( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -508,7 +506,9 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}"} # type: ignore + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" + } @distributed_trace_async async def get(self, resource_group_name: str, job_name: str, output_name: str, **kwargs: Any) -> _models.Output: @@ -537,10 +537,8 @@ async def get(self, resource_group_name: str, job_name: str, output_name: str, * _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Output] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Output] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -553,10 +551,11 @@ async def get(self, resource_group_name: str, job_name: str, output_name: str, * params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -576,7 +575,9 @@ async def get(self, resource_group_name: str, job_name: str, output_name: str, * return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" + } @distributed_trace def list_by_streaming_job( @@ -602,10 +603,8 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.OutputListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OutputListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -629,12 +628,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -642,14 +652,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("OutputListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -662,7 +673,9 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs"} # type: ignore + list_by_streaming_job.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs" + } async def _test_initial( self, @@ -683,16 +696,14 @@ async def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ResourceTestStatus]] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(output, (IO, bytes)): + if isinstance(output, (IOBase, bytes)): _content = output else: if output is not None: @@ -714,10 +725,11 @@ async def _test_initial( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -736,7 +748,9 @@ async def _test_initial( return deserialized - _test_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test"} # type: ignore + _test_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test" + } @overload async def begin_test( @@ -851,7 +865,7 @@ async def begin_test( full output definition intended to be tested. If the output specified already exists, this parameter can be left null to test the existing output as is or if specified, the properties specified will overwrite the corresponding properties in the existing output (exactly like a - PATCH operation) and the resulting output will be tested. Is either a model type or a IO type. + PATCH operation) and the resulting output will be tested. Is either a Output type or a IO type. Default value is None. :type output: ~azure.mgmt.streamanalytics.models.Output or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. @@ -874,16 +888,14 @@ async def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceTestStatus] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._test_initial( # type: ignore + raw_result = await self._test_initial( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, @@ -904,7 +916,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -916,6 +928,8 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_test.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test"} # type: ignore + begin_test.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py index 1b8147647149..b2e64bcfb580 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_private_endpoints_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -37,10 +38,6 @@ build_list_by_cluster_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -169,7 +166,8 @@ async def create_or_update( :param private_endpoint_name: The name of the private endpoint. Required. :type private_endpoint_name: str :param private_endpoint: The definition of the private endpoint that will be used to create a - new cluster or replace the existing one. Is either a model type or a IO type. Required. + new cluster or replace the existing one. Is either a PrivateEndpoint type or a IO type. + Required. :type private_endpoint: ~azure.mgmt.streamanalytics.models.PrivateEndpoint or IO :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. 
Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -198,16 +196,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpoint] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PrivateEndpoint] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(private_endpoint, (IO, bytes)): + if isinstance(private_endpoint, (IOBase, bytes)): _content = private_endpoint else: _json = self._serialize.body(private_endpoint, "PrivateEndpoint") @@ -228,10 +224,11 @@ async def create_or_update( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -248,11 +245,13 @@ async def create_or_update( deserialized = self._deserialize("PrivateEndpoint", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized + return deserialized # type: ignore - create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}"} # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" + } @distributed_trace_async async def get( @@ -283,10 +282,8 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpoint] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateEndpoint] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -299,10 +296,11 @@ async def get( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, 
stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -319,7 +317,9 @@ async def get( return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, cluster_name: str, private_endpoint_name: str, **kwargs: Any @@ -335,10 +335,8 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -351,10 +349,11 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -367,7 +366,9 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}"} # type: ignore + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" + } @distributed_trace_async async def begin_delete( @@ -397,13 +398,11 @@ async def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, @@ -422,7 
+421,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -434,9 +433,11 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" + } @distributed_trace def list_by_cluster( @@ -458,10 +459,8 @@ def list_by_cluster( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateEndpointListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -484,12 +483,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -497,14 +507,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("PrivateEndpointListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) 
response = pipeline_response.http_response @@ -517,4 +528,6 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_cluster.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints"} # type: ignore + list_by_cluster.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_sku_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_sku_operations.py deleted file mode 100644 index 9b261f773c6b..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_sku_operations.py +++ /dev/null @@ -1,139 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models -from ..._vendor import _convert_request -from ...operations._sku_operations import build_list_request - -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class SkuOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.streamanalytics.aio.StreamAnalyticsManagementClient`'s - :attr:`sku` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, job_name: str, **kwargs: Any - ) -> AsyncIterable["_models.GetStreamingJobSkuResult"]: - """Gets a list of available SKUs about the specified streaming job. 
- - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param job_name: The name of the streaming job. Required. - :type job_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either GetStreamingJobSkuResult or the result of - cls(response) - :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.streamanalytics.models.GetStreamingJobSkuResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.GetStreamingJobSkuResults] - - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_request( - resource_group_name=resource_group_name, - job_name=job_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - - else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - request.method = "GET" - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("GetStreamingJobSkuResults", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - list.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/skus"} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py index 2a28b639f3ab..63aac724aaaa 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_streaming_jobs_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -42,10 +43,6 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -89,16 +86,14 @@ async def _create_or_replace_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJob] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(streaming_job, (IO, bytes)): + if isinstance(streaming_job, (IOBase, bytes)): _content = streaming_job else: _json = self._serialize.body(streaming_job, "StreamingJob") @@ -118,10 +113,11 @@ async def _create_or_replace_initial( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -143,11 +139,13 @@ async def _create_or_replace_initial( deserialized = self._deserialize("StreamingJob", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_replace_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + _create_or_replace_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } @overload async def begin_create_or_replace( @@ -261,7 +259,8 @@ async def begin_create_or_replace( :param job_name: The name of the streaming job. Required. :type job_name: str :param streaming_job: The definition of the streaming job that will be used to create a new - streaming job or replace the existing one. Is either a model type or a IO type. Required. + streaming job or replace the existing one. Is either a StreamingJob type or an IO type. + Required.
:type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -290,16 +289,14 @@ async def begin_create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJob] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._create_or_replace_initial( # type: ignore + raw_result = await self._create_or_replace_initial( resource_group_name=resource_group_name, job_name=job_name, streaming_job=streaming_job, @@ -325,7 +322,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -337,9 +334,11 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_replace.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + begin_create_or_replace.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } @overload async def update( @@ -438,7 +437,7 @@ async def update( corresponding properties in the existing streaming job (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing input will remain the same and not change as a result of this PATCH operation. Is - either a model type or a IO type. Required. + either a StreamingJob type or an IO type. Required. :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current record set.
Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -463,16 +462,14 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJob] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(streaming_job, (IO, bytes)): + if isinstance(streaming_job, (IOBase, bytes)): _content = streaming_job else: _json = self._serialize.body(streaming_job, "StreamingJob") @@ -491,10 +488,11 @@ async def update( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -514,7 +512,9 @@ async def update( return deserialized - update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, job_name: str, **kwargs: Any @@ -530,10 +530,8 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -545,10 +543,11 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -561,7 +560,9 @@ async def _delete_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, 
{}) - _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } @distributed_trace_async async def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: Any) -> AsyncLROPoller[None]: @@ -587,13 +588,11 @@ async def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, @@ -611,7 +610,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -623,9 +622,11 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } @distributed_trace_async async def get( @@ -659,10 +660,8 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJob] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -675,10 +674,11 @@ async def get( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - 
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -698,7 +698,9 @@ async def get( return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } @distributed_trace def list_by_resource_group( @@ -723,10 +725,8 @@ def list_by_resource_group( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJobListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.StreamingJobListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -749,12 +749,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -762,14 +773,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("StreamingJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -782,7 +794,9 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs"} # type: ignore + list_by_resource_group.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs" + } @distributed_trace 
def list(self, expand: Optional[str] = None, **kwargs: Any) -> AsyncIterable["_models.StreamingJob"]: @@ -802,10 +816,8 @@ def list(self, expand: Optional[str] = None, **kwargs: Any) -> AsyncIterable["_m _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJobListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.StreamingJobListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -827,12 +839,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -840,14 +863,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("StreamingJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -860,7 +884,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs"} # type: ignore + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs"} async def _start_initial( # pylint: disable=inconsistent-return-statements self, @@ -880,16 +904,14 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(start_job_parameters, (IO, bytes)): + if isinstance(start_job_parameters, (IOBase, bytes)): _content = start_job_parameters else: if start_job_parameters is not None: @@ -910,10 +932,11 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -926,7 +949,9 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start"} # type: ignore + _start_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start" + } @overload async def begin_start( @@ -1019,7 +1044,7 @@ async def begin_start( :param job_name: The name of the streaming job. Required. :type job_name: str :param start_job_parameters: Parameters applicable to a start streaming job operation. Is - either a model type or a IO type. Default value is None. + either a StartStreamingJobParameters type or a IO type. Default value is None. :type start_job_parameters: ~azure.mgmt.streamanalytics.models.StartStreamingJobParameters or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. 
@@ -1040,14 +1065,12 @@ async def begin_start( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._start_initial( # type: ignore resource_group_name=resource_group_name, @@ -1067,7 +1090,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -1079,9 +1102,11 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start"} # type: ignore + begin_start.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start" + } async def _stop_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, job_name: str, **kwargs: Any @@ -1097,10 +1122,8 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_stop_request( resource_group_name=resource_group_name, @@ -1112,10 +1135,11 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, 
**kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1128,7 +1152,9 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _stop_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop"} # type: ignore + _stop_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop" + } @distributed_trace_async async def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: Any) -> AsyncLROPoller[None]: @@ -1155,13 +1181,11 @@ async def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: An _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._stop_initial( # type: ignore resource_group_name=resource_group_name, @@ -1179,7 +1203,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -1191,9 +1215,11 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop"} # type: ignore + begin_stop.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop" + } async def _scale_initial( # pylint: disable=inconsistent-return-statements self, @@ -1213,16 +1239,14 @@ async def _scale_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(scale_job_parameters, (IO, bytes)): + if isinstance(scale_job_parameters, (IOBase, bytes)): _content = scale_job_parameters else: if scale_job_parameters is not None: @@ -1243,10 +1267,11 @@ async def _scale_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1259,7 +1284,9 @@ async def _scale_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _scale_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale"} # type: ignore + _scale_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale" + } @overload async def begin_scale( @@ -1349,7 +1376,7 @@ async def begin_scale( :param job_name: The name of the streaming job. Required. :type job_name: str :param scale_job_parameters: Parameters applicable to a scale streaming job operation. Is - either a model type or a IO type. Default value is None. + either a ScaleStreamingJobParameters type or a IO type. Default value is None. :type scale_job_parameters: ~azure.mgmt.streamanalytics.models.ScaleStreamingJobParameters or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. 
@@ -1370,14 +1397,12 @@ async def begin_scale( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._scale_initial( # type: ignore resource_group_name=resource_group_name, @@ -1397,7 +1422,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: @@ -1409,6 +1434,8 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_scale.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale"} # type: ignore + begin_scale.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py index a3bababeec4d..dee363e07297 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_subscriptions_operations.py @@ -6,8 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# --------------------------------------------------------------------------
-import sys
-from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+from typing import Any, Callable, Dict, Optional, TypeVar

 from azure.core.exceptions import (
@@ -19,28 +18,15 @@
 )
 from azure.core.pipeline import PipelineResponse
 from azure.core.pipeline.transport import AsyncHttpResponse
-from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
 from azure.core.rest import HttpRequest
 from azure.core.tracing.decorator_async import distributed_trace_async
 from azure.core.utils import case_insensitive_dict
 from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling

 from ... import models as _models
 from ..._vendor import _convert_request
-from ...operations._subscriptions_operations import (
-    build_compile_query_request,
-    build_list_quotas_request,
-    build_sample_input_request,
-    build_test_input_request,
-    build_test_output_request,
-    build_test_query_request,
-)
+from ...operations._subscriptions_operations import build_list_quotas_request

-if sys.version_info >= (3, 8):
-    from typing import Literal  # pylint: disable=no-name-in-module, ungrouped-imports
-else:
-    from typing_extensions import Literal  # type: ignore # pylint: disable=ungrouped-imports
 T = TypeVar("T")
 ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]

@@ -68,8 +54,9 @@ def __init__(self, *args, **kwargs) -> None:
     async def list_quotas(self, location: str, **kwargs: Any) -> _models.SubscriptionQuotasListResult:
         """Retrieves the subscription's current quota information in a particular region.

-        :param location: The region to which the request is sent. You can find out which regions Azure
-         Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required.
+        :param location: The region in which to retrieve the subscription's quota information. You can
+         find the regions in which Azure Stream Analytics is supported here:
+         https://azure.microsoft.com/en-us/regions/. Required.
:type location: str :keyword callable cls: A custom type or function that will be passed the direct response :return: SubscriptionQuotasListResult or the result of cls(response) @@ -87,10 +74,8 @@ async def list_quotas(self, location: str, **kwargs: Any) -> _models.Subscriptio _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.SubscriptionQuotasListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SubscriptionQuotasListResult] = kwargs.pop("cls", None) request = build_list_quotas_request( location=location, @@ -101,10 +86,11 @@ async def list_quotas(self, location: str, **kwargs: Any) -> _models.Subscriptio params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -121,932 +107,6 @@ async def list_quotas(self, location: str, **kwargs: Any) -> _models.Subscriptio return deserialized - list_quotas.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas"} # type: ignore - - async def _test_query_initial( - self, location: str, test_query: Union[_models.TestQuery, IO], **kwargs: Any - ) -> Optional[_models.QueryTestingResult]: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.QueryTestingResult]] - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_query, (IO, bytes)): - _content = test_query - else: - _json = self._serialize.body(test_query, "TestQuery") - - request = build_test_query_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_query_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = 
self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("QueryTestingResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_query_initial.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery"} # type: ignore - - @overload - async def begin_test_query( - self, location: str, test_query: _models.TestQuery, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Required. - :type test_query: ~azure.mgmt.streamanalytics.models.TestQuery - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_test_query( - self, location: str, test_query: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Required. - :type test_query: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. 
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_test_query( - self, location: str, test_query: Union[_models.TestQuery, IO], **kwargs: Any - ) -> AsyncLROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Is either a model type or a IO type. Required. - :type test_query: ~azure.mgmt.streamanalytics.models.TestQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.QueryTestingResult] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = await self._test_query_initial( # type: ignore - location=location, - test_query=test_query, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("QueryTestingResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) # type: AsyncPollingMethod - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - - begin_test_query.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery"} # type: ignore - - @overload - async def compile_query( - self, - location: str, - compile_query: _models.CompileQuery, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Required. - :type compile_query: ~azure.mgmt.streamanalytics.models.CompileQuery - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def compile_query( - self, location: str, compile_query: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Required. - :type compile_query: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def compile_query( - self, location: str, compile_query: Union[_models.CompileQuery, IO], **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Is either a model type or a IO type. Required. - :type compile_query: ~azure.mgmt.streamanalytics.models.CompileQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.QueryCompilationResult] - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(compile_query, (IO, bytes)): - _content = compile_query - else: - _json = self._serialize.body(compile_query, "CompileQuery") - - request = build_compile_query_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self.compile_query.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("QueryCompilationResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - compile_query.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/compileQuery"} # type: ignore - - async def _sample_input_initial( - self, location: str, sample_input: Union[_models.SampleInput, IO], **kwargs: Any - ) -> _models.SampleInputResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.SampleInputResult] - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(sample_input, (IO, bytes)): - _content = sample_input - else: - _json = self._serialize.body(sample_input, "SampleInput") - - request = build_sample_input_request( - location=location, - subscription_id=self._config.subscription_id, 
- api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._sample_input_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("SampleInputResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _sample_input_initial.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput"} # type: ignore - - @overload - async def begin_sample_input( - self, location: str, sample_input: _models.SampleInput, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Required. - :type sample_input: ~azure.mgmt.streamanalytics.models.SampleInput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_sample_input( - self, location: str, sample_input: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Required. - :type sample_input: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_sample_input( - self, location: str, sample_input: Union[_models.SampleInput, IO], **kwargs: Any - ) -> AsyncLROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Is either a model type or a IO type. Required. - :type sample_input: ~azure.mgmt.streamanalytics.models.SampleInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.SampleInputResult] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = await self._sample_input_initial( # type: ignore - location=location, - sample_input=sample_input, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SampleInputResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) # type: AsyncPollingMethod - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - - begin_sample_input.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput"} # type: ignore - - async def _test_input_initial( - self, location: str, test_input: Union[_models.TestInput, IO], **kwargs: Any - ) -> _models.TestDatasourceResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.TestDatasourceResult] - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_input, (IO, bytes)): - _content = test_input - else: - _json = self._serialize.body(test_input, "TestInput") - - request = build_test_input_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_input_initial.metadata["url"], - headers=_headers, - params=_params, - ) 
- request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_input_initial.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput"} # type: ignore - - @overload - async def begin_test_input( - self, location: str, test_input: _models.TestInput, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. - Required. - :type test_input: ~azure.mgmt.streamanalytics.models.TestInput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_test_input( - self, location: str, test_input: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. - Required. - :type test_input: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_test_input( - self, location: str, test_input: Union[_models.TestInput, IO], **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. Is - either a model type or a IO type. Required. - :type test_input: ~azure.mgmt.streamanalytics.models.TestInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.TestDatasourceResult] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = await self._test_input_initial( # type: ignore - location=location, - test_input=test_input, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) # type: AsyncPollingMethod - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - - begin_test_input.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput"} # type: ignore - - async def _test_output_initial( - self, location: str, test_output: Union[_models.TestOutput, IO], **kwargs: Any - ) -> _models.TestDatasourceResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.TestDatasourceResult] - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_output, (IO, bytes)): - _content = test_output - else: - _json = self._serialize.body(test_output, "TestOutput") - - request = build_test_output_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_output_initial.metadata["url"], - headers=_headers, - 
params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_output_initial.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput"} # type: ignore - - @overload - async def begin_test_output( - self, location: str, test_output: _models.TestOutput, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Required. - :type test_output: ~azure.mgmt.streamanalytics.models.TestOutput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - async def begin_test_output( - self, location: str, test_output: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Required. - :type test_output: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace_async - async def begin_test_output( - self, location: str, test_output: Union[_models.TestOutput, IO], **kwargs: Any - ) -> AsyncLROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Is either a model type or a IO type. Required. - :type test_output: ~azure.mgmt.streamanalytics.models.TestOutput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either TestDatasourceResult or the result - of cls(response) - :rtype: - ~azure.core.polling.AsyncLROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.TestDatasourceResult] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = await self._test_output_initial( # type: ignore - location=location, - test_output=test_output, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method = cast( - AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) # type: AsyncPollingMethod - elif polling is False: - polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) - else: - polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - - begin_test_output.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput"} # type: ignore + list_quotas.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py index 20ca21ba0e4b..9af3a7874d88 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/aio/operations/_transformations_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# --------------------------------------------------------------------------
-import sys
+from io import IOBase
 from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload

 from azure.core.exceptions import (
@@ -32,10 +32,6 @@
     build_update_request,
 )

-if sys.version_info >= (3, 8):
-    from typing import Literal  # pylint: disable=no-name-in-module, ungrouped-imports
-else:
-    from typing_extensions import Literal  # type: ignore # pylint: disable=ungrouped-imports
 T = TypeVar("T")
 ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]

@@ -167,8 +163,8 @@ async def create_or_replace(
         :param transformation_name: The name of the transformation. Required.
         :type transformation_name: str
         :param transformation: The definition of the transformation that will be used to create a new
-         transformation or replace the existing one under the streaming job. Is either a model type or a
-         IO type. Required.
+         transformation or replace the existing one under the streaming job. Is either a Transformation
+         type or an IO type. Required.
         :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO
         :param if_match: The ETag of the transformation. Omit this value to always overwrite the
         current transformation. Specify the last-seen ETag value to prevent accidentally overwriting
@@ -197,16 +193,14 @@
         _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
         _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

-        api_version = kwargs.pop(
-            "api_version", _params.pop("api-version", "2021-10-01-preview")
-        )  # type: Literal["2021-10-01-preview"]
-        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
-        cls = kwargs.pop("cls", None)  # type: ClsType[_models.Transformation]
+        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+        cls: ClsType[_models.Transformation] = kwargs.pop("cls", None)

         content_type = content_type or "application/json"
         _json = None
         _content = None
-        if isinstance(transformation, (IO, bytes)):
+        if isinstance(transformation, (IOBase, bytes)):
             _content = transformation
         else:
             _json = self._serialize.body(transformation, "Transformation")
@@ -227,10 +221,11 @@
             params=_params,
         )
         request = _convert_request(request)
-        request.url = self._client.format_url(request.url)  # type: ignore
+        request.url = self._client.format_url(request.url)

-        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
-            request, stream=False, **kwargs
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request, stream=_stream, **kwargs
         )

         response = pipeline_response.http_response
@@ -252,11 +247,13 @@
         deserialized = self._deserialize("Transformation", pipeline_response)

         if cls:
-            return cls(pipeline_response, deserialized, response_headers)
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

-        return deserialized
+        return deserialized  # type: ignore

-    create_or_replace.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}"}  # type: ignore
+    create_or_replace.metadata = {
+        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}"
+    }

     @overload
     async def update(
@@ -367,7 +364,7 @@ async def update(
         the corresponding properties in the existing transformation (ie. Those properties will be
         updated). Any properties that are set to null here will mean that the corresponding property
         in the existing transformation will remain the same and not change as a result of this PATCH
-         operation. Is either a model type or a IO type. Required.
+         operation. Is either a Transformation type or an IO type. Required.
         :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO
         :param if_match: The ETag of the transformation. Omit this value to always overwrite the
         current transformation. Specify the last-seen ETag value to prevent accidentally overwriting
@@ -392,16 +389,14 @@
         _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
         _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

-        api_version = kwargs.pop(
-            "api_version", _params.pop("api-version", "2021-10-01-preview")
-        )  # type: Literal["2021-10-01-preview"]
-        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
-        cls = kwargs.pop("cls", None)  # type: ClsType[_models.Transformation]
+        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+        cls: ClsType[_models.Transformation] = kwargs.pop("cls", None)

         content_type = content_type or "application/json"
         _json = None
         _content = None
-        if isinstance(transformation, (IO, bytes)):
+        if isinstance(transformation, (IOBase, bytes)):
             _content = transformation
         else:
             _json = self._serialize.body(transformation, "Transformation")
@@ -421,10 +416,11 @@ async def update(
             params=_params,
         )
         request = _convert_request(request)
-        request.url = self._client.format_url(request.url)  # type: ignore
+        request.url = self._client.format_url(request.url)

-        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
-            request, stream=False, **kwargs
+        _stream = False
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request, stream=_stream, **kwargs
         )

         response = pipeline_response.http_response
@@ -444,7 +440,9 @@ async def update(
         return deserialized

-    update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}"}  # type: ignore
+    update.metadata = {
+        "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}"
+    }

     @distributed_trace_async
     async def get(
@@ -475,10 +473,8 @@ async def get(
         _headers = kwargs.pop("headers", {}) or {}
         _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

-        api_version = kwargs.pop(
-            "api_version", _params.pop("api-version", "2021-10-01-preview")
-        )  # type: Literal["2021-10-01-preview"]
-        cls = kwargs.pop("cls", None)  # type: ClsType[_models.Transformation]
+        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
+        cls: ClsType[_models.Transformation] = kwargs.pop("cls", None)

         request = build_get_request(
resource_group_name=resource_group_name, @@ -491,10 +487,11 @@ async def get( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -514,4 +511,6 @@ async def get( return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py index 831f8a6fc4c3..7873203fda0d 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/__init__.py @@ -11,16 +11,11 @@ from ._models_py3 import AzureDataLakeStoreOutputDataSource from ._models_py3 import AzureDataLakeStoreOutputDataSourceProperties from ._models_py3 import AzureFunctionOutputDataSource -from ._models_py3 import AzureMachineLearningServiceFunctionBinding -from ._models_py3 import AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters -from ._models_py3 import AzureMachineLearningServiceInputColumn -from ._models_py3 import AzureMachineLearningServiceInputs -from ._models_py3 import AzureMachineLearningServiceOutputColumn -from ._models_py3 import AzureMachineLearningStudioFunctionBinding -from ._models_py3 import AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters -from ._models_py3 import AzureMachineLearningStudioInputColumn -from ._models_py3 import AzureMachineLearningStudioInputs -from ._models_py3 import AzureMachineLearningStudioOutputColumn +from ._models_py3 import AzureMachineLearningWebServiceFunctionBinding +from ._models_py3 import AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters +from ._models_py3 import AzureMachineLearningWebServiceInputColumn +from ._models_py3 import AzureMachineLearningWebServiceInputs +from ._models_py3 import AzureMachineLearningWebServiceOutputColumn from ._models_py3 import AzureSqlDatabaseDataSourceProperties from ._models_py3 import AzureSqlDatabaseOutputDataSource from ._models_py3 import AzureSqlDatabaseOutputDataSourceProperties @@ -36,19 +31,14 @@ from ._models_py3 import BlobReferenceInputDataSourceProperties from ._models_py3 import BlobStreamInputDataSource from ._models_py3 import BlobStreamInputDataSourceProperties -from ._models_py3 import CSharpFunctionBinding -from ._models_py3 import CSharpFunctionRetrieveDefaultDefinitionParameters from ._models_py3 import Cluster from ._models_py3 import ClusterInfo from ._models_py3 import ClusterJob from ._models_py3 import ClusterJobListResult from ._models_py3 import ClusterListResult -from ._models_py3 import ClusterProperties from ._models_py3 
import ClusterSku -from ._models_py3 import CompileQuery from ._models_py3 import Compression from ._models_py3 import CsvSerialization -from ._models_py3 import CustomClrSerialization from ._models_py3 import DiagnosticCondition from ._models_py3 import Diagnostics from ._models_py3 import DocumentDbOutputDataSource @@ -56,7 +46,6 @@ from ._models_py3 import ErrorDetails from ._models_py3 import ErrorError from ._models_py3 import ErrorResponse -from ._models_py3 import EventGridStreamInputDataSource from ._models_py3 import EventHubDataSourceProperties from ._models_py3 import EventHubOutputDataSource from ._models_py3 import EventHubOutputDataSourceProperties @@ -64,7 +53,6 @@ from ._models_py3 import EventHubStreamInputDataSourceProperties from ._models_py3 import EventHubV2OutputDataSource from ._models_py3 import EventHubV2StreamInputDataSource -from ._models_py3 import External from ._models_py3 import FileReferenceInputDataSource from ._models_py3 import Function from ._models_py3 import FunctionBinding @@ -78,20 +66,15 @@ from ._models_py3 import GatewayMessageBusSourceProperties from ._models_py3 import GatewayMessageBusStreamInputDataSource from ._models_py3 import GatewayMessageBusStreamInputDataSourceProperties -from ._models_py3 import GetStreamingJobSkuResult -from ._models_py3 import GetStreamingJobSkuResultSku -from ._models_py3 import GetStreamingJobSkuResults from ._models_py3 import Identity from ._models_py3 import Input from ._models_py3 import InputListResult from ._models_py3 import InputProperties -from ._models_py3 import InputWatermarkProperties from ._models_py3 import IoTHubStreamInputDataSource from ._models_py3 import JavaScriptFunctionBinding from ._models_py3 import JavaScriptFunctionRetrieveDefaultDefinitionParameters from ._models_py3 import JobStorageAccount from ._models_py3 import JsonSerialization -from ._models_py3 import LastOutputEventTimestamp from ._models_py3 import OAuthBasedDataSourceProperties from ._models_py3 import Operation from ._models_py3 import OperationDisplay @@ -99,34 +82,18 @@ from ._models_py3 import Output from ._models_py3 import OutputDataSource from ._models_py3 import OutputListResult -from ._models_py3 import OutputWatermarkProperties from ._models_py3 import ParquetSerialization -from ._models_py3 import PostgreSQLDataSourceProperties -from ._models_py3 import PostgreSQLOutputDataSource -from ._models_py3 import PostgreSQLOutputDataSourceProperties from ._models_py3 import PowerBIOutputDataSource from ._models_py3 import PowerBIOutputDataSourceProperties from ._models_py3 import PrivateEndpoint from ._models_py3 import PrivateEndpointListResult -from ._models_py3 import PrivateEndpointProperties from ._models_py3 import PrivateLinkConnectionState from ._models_py3 import PrivateLinkServiceConnection from ._models_py3 import ProxyResource -from ._models_py3 import QueryCompilationError -from ._models_py3 import QueryCompilationResult -from ._models_py3 import QueryFunction -from ._models_py3 import QueryInput -from ._models_py3 import QueryTestingResult -from ._models_py3 import RawOutputDatasource -from ._models_py3 import RawReferenceInputDataSource -from ._models_py3 import RawStreamInputDataSource from ._models_py3 import ReferenceInputDataSource from ._models_py3 import ReferenceInputProperties -from ._models_py3 import RefreshConfiguration from ._models_py3 import Resource from ._models_py3 import ResourceTestStatus -from ._models_py3 import SampleInput -from ._models_py3 import SampleInputResult from ._models_py3 
import ScalarFunctionProperties from ._models_py3 import ScaleStreamingJobParameters from ._models_py3 import Serialization @@ -136,7 +103,6 @@ from ._models_py3 import ServiceBusTopicOutputDataSource from ._models_py3 import ServiceBusTopicOutputDataSourceProperties from ._models_py3 import Sku -from ._models_py3 import SkuCapacity from ._models_py3 import StartStreamingJobParameters from ._models_py3 import StorageAccount from ._models_py3 import StreamInputDataSource @@ -146,43 +112,27 @@ from ._models_py3 import SubResource from ._models_py3 import SubscriptionQuota from ._models_py3 import SubscriptionQuotasListResult -from ._models_py3 import TestDatasourceResult -from ._models_py3 import TestInput -from ._models_py3 import TestOutput -from ._models_py3 import TestQuery -from ._models_py3 import TestQueryDiagnostics from ._models_py3 import TrackedResource from ._models_py3 import Transformation from ._stream_analytics_management_client_enums import AuthenticationMode -from ._stream_analytics_management_client_enums import BlobWriteMode from ._stream_analytics_management_client_enums import ClusterProvisioningState from ._stream_analytics_management_client_enums import ClusterSkuName from ._stream_analytics_management_client_enums import CompatibilityLevel from ._stream_analytics_management_client_enums import CompressionType from ._stream_analytics_management_client_enums import ContentStoragePolicy from ._stream_analytics_management_client_enums import Encoding -from ._stream_analytics_management_client_enums import EventGridEventSchemaType from ._stream_analytics_management_client_enums import EventSerializationType from ._stream_analytics_management_client_enums import EventsOutOfOrderPolicy -from ._stream_analytics_management_client_enums import InputWatermarkMode from ._stream_analytics_management_client_enums import JobState from ._stream_analytics_management_client_enums import JobType from ._stream_analytics_management_client_enums import JsonOutputSerializationFormat from ._stream_analytics_management_client_enums import OutputErrorPolicy from ._stream_analytics_management_client_enums import OutputStartMode -from ._stream_analytics_management_client_enums import OutputWatermarkMode -from ._stream_analytics_management_client_enums import QueryTestingResultStatus from ._stream_analytics_management_client_enums import RefreshType -from ._stream_analytics_management_client_enums import ResourceType -from ._stream_analytics_management_client_enums import SampleInputResultStatus -from ._stream_analytics_management_client_enums import SkuCapacityScaleType from ._stream_analytics_management_client_enums import SkuName -from ._stream_analytics_management_client_enums import TestDatasourceResultStatus -from ._stream_analytics_management_client_enums import UpdatableUdfRefreshType -from ._stream_analytics_management_client_enums import UpdateMode from ._patch import __all__ as _patch_all -from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import * # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk __all__ = [ @@ -191,16 +141,11 @@ "AzureDataLakeStoreOutputDataSource", "AzureDataLakeStoreOutputDataSourceProperties", "AzureFunctionOutputDataSource", - "AzureMachineLearningServiceFunctionBinding", - "AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters", - "AzureMachineLearningServiceInputColumn", - "AzureMachineLearningServiceInputs", - "AzureMachineLearningServiceOutputColumn", - 
"AzureMachineLearningStudioFunctionBinding", - "AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters", - "AzureMachineLearningStudioInputColumn", - "AzureMachineLearningStudioInputs", - "AzureMachineLearningStudioOutputColumn", + "AzureMachineLearningWebServiceFunctionBinding", + "AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters", + "AzureMachineLearningWebServiceInputColumn", + "AzureMachineLearningWebServiceInputs", + "AzureMachineLearningWebServiceOutputColumn", "AzureSqlDatabaseDataSourceProperties", "AzureSqlDatabaseOutputDataSource", "AzureSqlDatabaseOutputDataSourceProperties", @@ -216,19 +161,14 @@ "BlobReferenceInputDataSourceProperties", "BlobStreamInputDataSource", "BlobStreamInputDataSourceProperties", - "CSharpFunctionBinding", - "CSharpFunctionRetrieveDefaultDefinitionParameters", "Cluster", "ClusterInfo", "ClusterJob", "ClusterJobListResult", "ClusterListResult", - "ClusterProperties", "ClusterSku", - "CompileQuery", "Compression", "CsvSerialization", - "CustomClrSerialization", "DiagnosticCondition", "Diagnostics", "DocumentDbOutputDataSource", @@ -236,7 +176,6 @@ "ErrorDetails", "ErrorError", "ErrorResponse", - "EventGridStreamInputDataSource", "EventHubDataSourceProperties", "EventHubOutputDataSource", "EventHubOutputDataSourceProperties", @@ -244,7 +183,6 @@ "EventHubStreamInputDataSourceProperties", "EventHubV2OutputDataSource", "EventHubV2StreamInputDataSource", - "External", "FileReferenceInputDataSource", "Function", "FunctionBinding", @@ -258,20 +196,15 @@ "GatewayMessageBusSourceProperties", "GatewayMessageBusStreamInputDataSource", "GatewayMessageBusStreamInputDataSourceProperties", - "GetStreamingJobSkuResult", - "GetStreamingJobSkuResultSku", - "GetStreamingJobSkuResults", "Identity", "Input", "InputListResult", "InputProperties", - "InputWatermarkProperties", "IoTHubStreamInputDataSource", "JavaScriptFunctionBinding", "JavaScriptFunctionRetrieveDefaultDefinitionParameters", "JobStorageAccount", "JsonSerialization", - "LastOutputEventTimestamp", "OAuthBasedDataSourceProperties", "Operation", "OperationDisplay", @@ -279,34 +212,18 @@ "Output", "OutputDataSource", "OutputListResult", - "OutputWatermarkProperties", "ParquetSerialization", - "PostgreSQLDataSourceProperties", - "PostgreSQLOutputDataSource", - "PostgreSQLOutputDataSourceProperties", "PowerBIOutputDataSource", "PowerBIOutputDataSourceProperties", "PrivateEndpoint", "PrivateEndpointListResult", - "PrivateEndpointProperties", "PrivateLinkConnectionState", "PrivateLinkServiceConnection", "ProxyResource", - "QueryCompilationError", - "QueryCompilationResult", - "QueryFunction", - "QueryInput", - "QueryTestingResult", - "RawOutputDatasource", - "RawReferenceInputDataSource", - "RawStreamInputDataSource", "ReferenceInputDataSource", "ReferenceInputProperties", - "RefreshConfiguration", "Resource", "ResourceTestStatus", - "SampleInput", - "SampleInputResult", "ScalarFunctionProperties", "ScaleStreamingJobParameters", "Serialization", @@ -316,7 +233,6 @@ "ServiceBusTopicOutputDataSource", "ServiceBusTopicOutputDataSourceProperties", "Sku", - "SkuCapacity", "StartStreamingJobParameters", "StorageAccount", "StreamInputDataSource", @@ -326,40 +242,24 @@ "SubResource", "SubscriptionQuota", "SubscriptionQuotasListResult", - "TestDatasourceResult", - "TestInput", - "TestOutput", - "TestQuery", - "TestQueryDiagnostics", "TrackedResource", "Transformation", "AuthenticationMode", - "BlobWriteMode", "ClusterProvisioningState", "ClusterSkuName", "CompatibilityLevel", 
"CompressionType", "ContentStoragePolicy", "Encoding", - "EventGridEventSchemaType", "EventSerializationType", "EventsOutOfOrderPolicy", - "InputWatermarkMode", "JobState", "JobType", "JsonOutputSerializationFormat", "OutputErrorPolicy", "OutputStartMode", - "OutputWatermarkMode", - "QueryTestingResultStatus", "RefreshType", - "ResourceType", - "SampleInputResultStatus", - "SkuCapacityScaleType", "SkuName", - "TestDatasourceResultStatus", - "UpdatableUdfRefreshType", - "UpdateMode", ] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py index 5d1acb2232c2..fcba3d856935 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_models_py3.py @@ -74,8 +74,8 @@ def __init__( inputs: Optional[List["_models.FunctionInput"]] = None, output: Optional["_models.FunctionOutput"] = None, binding: Optional["_models.FunctionBinding"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword inputs: :paramtype inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] @@ -86,7 +86,7 @@ def __init__( :paramtype binding: ~azure.mgmt.streamanalytics.models.FunctionBinding """ super().__init__(**kwargs) - self.type = None # type: Optional[str] + self.type: Optional[str] = None self.etag = None self.inputs = inputs self.output = output @@ -134,8 +134,8 @@ def __init__( inputs: Optional[List["_models.FunctionInput"]] = None, output: Optional["_models.FunctionOutput"] = None, binding: Optional["_models.FunctionBinding"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword inputs: :paramtype inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] @@ -146,21 +146,20 @@ def __init__( :paramtype binding: ~azure.mgmt.streamanalytics.models.FunctionBinding """ super().__init__(inputs=inputs, output=output, binding=binding, **kwargs) - self.type = "Aggregate" # type: str + self.type: str = "Aggregate" class Serialization(_serialization.Model): - """Describes how data from an input is serialized or how data is serialized when written to an output. + """Describes how data from an input is serialized or how data is serialized when written to an + output. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AvroSerialization, CsvSerialization, CustomClrSerialization, JsonSerialization, - ParquetSerialization + AvroSerialization, CsvSerialization, JsonSerialization, ParquetSerialization All required parameters must be populated in order to send to Azure. :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", and - "Parquet". + (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", and "Parquet". 
:vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType """ @@ -176,26 +175,25 @@ class Serialization(_serialization.Model): "type": { "Avro": "AvroSerialization", "Csv": "CsvSerialization", - "CustomClr": "CustomClrSerialization", "Json": "JsonSerialization", "Parquet": "ParquetSerialization", } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.type = None # type: Optional[str] + self.type: Optional[str] = None class AvroSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in Avro format. + """Describes how data from an input is serialized or how data is serialized when written to an + output in Avro format. All required parameters must be populated in order to send to Azure. :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", and - "Parquet". + (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", and "Parquet". :vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType :ivar properties: The properties that are associated with the Avro serialization type. Required on PUT (CreateOrReplace) requests. @@ -211,14 +209,14 @@ class AvroSerialization(Serialization): "properties": {"key": "properties", "type": "object"}, } - def __init__(self, *, properties: Optional[JSON] = None, **kwargs): + def __init__(self, *, properties: Optional[JSON] = None, **kwargs: Any) -> None: """ :keyword properties: The properties that are associated with the Avro serialization type. Required on PUT (CreateOrReplace) requests. :paramtype properties: JSON """ super().__init__(**kwargs) - self.type = "Avro" # type: str + self.type: str = "Avro" self.properties = properties @@ -226,11 +224,11 @@ class OutputDataSource(_serialization.Model): """Describes the data source that output will be written to. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - GatewayMessageBusOutputDataSource, AzureFunctionOutputDataSource, PostgreSQLOutputDataSource, + GatewayMessageBusOutputDataSource, AzureFunctionOutputDataSource, AzureDataLakeStoreOutputDataSource, EventHubV2OutputDataSource, EventHubOutputDataSource, ServiceBusQueueOutputDataSource, ServiceBusTopicOutputDataSource, AzureSynapseOutputDataSource, AzureSqlDatabaseOutputDataSource, BlobOutputDataSource, DocumentDbOutputDataSource, - AzureTableOutputDataSource, PowerBIOutputDataSource, RawOutputDatasource + AzureTableOutputDataSource, PowerBIOutputDataSource All required parameters must be populated in order to send to Azure. 
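# The _subtype_map tables above are the discriminator registries used during
# (de)serialization: each concrete subclass pins self.type to its map key, so
# the serializer can pick the right Python class from the wire payload. A small
# sketch using AvroSerialization, whose constructor is shown above (the empty
# properties dict is a placeholder):
from azure.mgmt.streamanalytics import models

serialization = models.AvroSerialization(properties={})
# Callers never pass `type`; the subclass constructor sets the discriminator.
assert serialization.type == "Avro"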
@@ -251,7 +249,6 @@ class OutputDataSource(_serialization.Model): "type": { "GatewayMessageBus": "GatewayMessageBusOutputDataSource", "Microsoft.AzureFunction": "AzureFunctionOutputDataSource", - "Microsoft.DBForPostgreSQL/servers/databases": "PostgreSQLOutputDataSource", "Microsoft.DataLake/Accounts": "AzureDataLakeStoreOutputDataSource", "Microsoft.EventHub/EventHub": "EventHubV2OutputDataSource", "Microsoft.ServiceBus/EventHub": "EventHubOutputDataSource", @@ -263,14 +260,13 @@ class OutputDataSource(_serialization.Model): "Microsoft.Storage/DocumentDB": "DocumentDbOutputDataSource", "Microsoft.Storage/Table": "AzureTableOutputDataSource", "PowerBI": "PowerBIOutputDataSource", - "Raw": "RawOutputDatasource", } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.type = None # type: Optional[str] + self.type: Optional[str] = None class AzureDataLakeStoreOutputDataSource(OutputDataSource): @@ -345,8 +341,8 @@ def __init__( date_format: Optional[str] = None, time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only @@ -383,7 +379,7 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) - self.type = "Microsoft.DataLake/Accounts" # type: str + self.type: str = "Microsoft.DataLake/Accounts" self.refresh_token = refresh_token self.token_user_principal_name = token_user_principal_name self.token_user_display_name = token_user_display_name @@ -396,7 +392,8 @@ def __init__( class OAuthBasedDataSourceProperties(_serialization.Model): - """The properties that are associated with data sources that use OAuth as their authentication model. + """The properties that are associated with data sources that use OAuth as their authentication + model. :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only @@ -427,8 +424,8 @@ def __init__( refresh_token: Optional[str] = None, token_user_principal_name: Optional[str] = None, token_user_display_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only @@ -514,8 +511,8 @@ def __init__( date_format: Optional[str] = None, time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only @@ -610,8 +607,8 @@ def __init__( api_key: Optional[str] = None, max_batch_size: Optional[float] = None, max_batch_count: Optional[float] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword function_app_name: The name of your Azure Functions app. 
         :paramtype function_app_name: str
@@ -629,7 +626,7 @@ def __init__(
         :paramtype max_batch_count: float
         """
         super().__init__(**kwargs)
-        self.type = "Microsoft.AzureFunction"  # type: str
+        self.type: str = "Microsoft.AzureFunction"
         self.function_app_name = function_app_name
         self.function_name = function_name
         self.api_key = api_key
@@ -638,11 +635,11 @@ def __init__(


 class FunctionBinding(_serialization.Model):
-    """The physical binding of the function. For example, in the Azure Machine Learning web service’s case, this describes the endpoint.
+    """The physical binding of the function. For example, in the Azure Machine Learning web service’s
+    case, this describes the endpoint.

     You probably want to use the sub-classes and not this class directly. Known sub-classes are:
-    AzureMachineLearningStudioFunctionBinding, AzureMachineLearningServiceFunctionBinding,
-    CSharpFunctionBinding, JavaScriptFunctionBinding
+    AzureMachineLearningWebServiceFunctionBinding, JavaScriptFunctionBinding

     All required parameters must be populated in order to send to Azure.

@@ -660,20 +657,18 @@ class FunctionBinding(_serialization.Model):

     _subtype_map = {
         "type": {
-            "Microsoft.MachineLearning/WebService": "AzureMachineLearningStudioFunctionBinding",
-            "Microsoft.MachineLearningServices": "AzureMachineLearningServiceFunctionBinding",
-            "Microsoft.StreamAnalytics/CLRUdf": "CSharpFunctionBinding",
+            "Microsoft.MachineLearning/WebService": "AzureMachineLearningWebServiceFunctionBinding",
             "Microsoft.StreamAnalytics/JavascriptUdf": "JavaScriptFunctionBinding",
         }
     }

-    def __init__(self, **kwargs):
+    def __init__(self, **kwargs: Any) -> None:
         """ """
         super().__init__(**kwargs)
-        self.type = None  # type: Optional[str]
+        self.type: Optional[str] = None


-class AzureMachineLearningServiceFunctionBinding(FunctionBinding):
+class AzureMachineLearningWebServiceFunctionBinding(FunctionBinding):
     """The binding to an Azure Machine Learning web service.

     All required parameters must be populated in order to send to Azure.
@@ -681,27 +676,20 @@ class AzureMachineLearningServiceFunctionBinding(FunctionBinding):
     :ivar type: Indicates the function binding type. Required.
     :vartype type: str
     :ivar endpoint: The Request-Response execute endpoint of the Azure Machine Learning web
-     service.
+     service. Find out more here:
+     https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs.
     :vartype endpoint: str
     :ivar api_key: The API key used to authenticate with Request-Response endpoint.
     :vartype api_key: str
     :ivar inputs: The inputs for the Azure Machine Learning web service endpoint.
-    :vartype inputs:
-     list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn]
+    :vartype inputs: ~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceInputs
     :ivar outputs: A list of outputs from the Azure Machine Learning web service endpoint
     execution.
     :vartype outputs:
-     list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceOutputColumn]
+     list[~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceOutputColumn]
     :ivar batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure
     ML RRS execute request. Default is 1000.
     :vartype batch_size: int
-    :ivar number_of_parallel_requests: The number of parallel requests that will be sent per
-     partition of your job to the machine learning service. Default is 1.
-    :vartype number_of_parallel_requests: int
-    :ivar input_request_name: Label for the input request object.
- :vartype input_request_name: str - :ivar output_response_name: Label for the output request object. - :vartype output_response_name: str """ _validation = { @@ -712,12 +700,9 @@ class AzureMachineLearningServiceFunctionBinding(FunctionBinding): "type": {"key": "type", "type": "str"}, "endpoint": {"key": "properties.endpoint", "type": "str"}, "api_key": {"key": "properties.apiKey", "type": "str"}, - "inputs": {"key": "properties.inputs", "type": "[AzureMachineLearningServiceInputColumn]"}, - "outputs": {"key": "properties.outputs", "type": "[AzureMachineLearningServiceOutputColumn]"}, + "inputs": {"key": "properties.inputs", "type": "AzureMachineLearningWebServiceInputs"}, + "outputs": {"key": "properties.outputs", "type": "[AzureMachineLearningWebServiceOutputColumn]"}, "batch_size": {"key": "properties.batchSize", "type": "int"}, - "number_of_parallel_requests": {"key": "properties.numberOfParallelRequests", "type": "int"}, - "input_request_name": {"key": "properties.inputRequestName", "type": "str"}, - "output_response_name": {"key": "properties.outputResponseName", "type": "str"}, } def __init__( @@ -725,57 +710,42 @@ def __init__( *, endpoint: Optional[str] = None, api_key: Optional[str] = None, - inputs: Optional[List["_models.AzureMachineLearningServiceInputColumn"]] = None, - outputs: Optional[List["_models.AzureMachineLearningServiceOutputColumn"]] = None, + inputs: Optional["_models.AzureMachineLearningWebServiceInputs"] = None, + outputs: Optional[List["_models.AzureMachineLearningWebServiceOutputColumn"]] = None, batch_size: Optional[int] = None, - number_of_parallel_requests: Optional[int] = None, - input_request_name: Optional[str] = None, - output_response_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword endpoint: The Request-Response execute endpoint of the Azure Machine Learning web - service. + service. Find out more here: + https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. :paramtype endpoint: str :keyword api_key: The API key used to authenticate with Request-Response endpoint. :paramtype api_key: str :keyword inputs: The inputs for the Azure Machine Learning web service endpoint. - :paramtype inputs: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn] + :paramtype inputs: ~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceInputs :keyword outputs: A list of outputs from the Azure Machine Learning web service endpoint execution. :paramtype outputs: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceOutputColumn] + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceOutputColumn] :keyword batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure ML RRS execute request. Default is 1000. :paramtype batch_size: int - :keyword number_of_parallel_requests: The number of parallel requests that will be sent per - partition of your job to the machine learning service. Default is 1. - :paramtype number_of_parallel_requests: int - :keyword input_request_name: Label for the input request object. - :paramtype input_request_name: str - :keyword output_response_name: Label for the output request object. 
- :paramtype output_response_name: str """ super().__init__(**kwargs) - self.type = "Microsoft.MachineLearningServices" # type: str + self.type: str = "Microsoft.MachineLearning/WebService" self.endpoint = endpoint self.api_key = api_key self.inputs = inputs self.outputs = outputs self.batch_size = batch_size - self.number_of_parallel_requests = number_of_parallel_requests - self.input_request_name = input_request_name - self.output_response_name = output_response_name class FunctionRetrieveDefaultDefinitionParameters(_serialization.Model): """Parameters used to specify the type of function to retrieve the default definition for. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters, - AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters, - CSharpFunctionRetrieveDefaultDefinitionParameters, + AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters, JavaScriptFunctionRetrieveDefaultDefinitionParameters All required parameters must be populated in order to send to Azure. @@ -794,247 +764,29 @@ class FunctionRetrieveDefaultDefinitionParameters(_serialization.Model): _subtype_map = { "binding_type": { - "Microsoft.MachineLearning/WebService": "AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters", - "Microsoft.MachineLearningServices": "AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters", - "Microsoft.StreamAnalytics/CLRUdf": "CSharpFunctionRetrieveDefaultDefinitionParameters", + "Microsoft.MachineLearning/WebService": "AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters", "Microsoft.StreamAnalytics/JavascriptUdf": "JavaScriptFunctionRetrieveDefaultDefinitionParameters", } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.binding_type = None # type: Optional[str] + self.binding_type: Optional[str] = None -class AzureMachineLearningServiceFunctionRetrieveDefaultDefinitionParameters( +class AzureMachineLearningWebServiceFunctionRetrieveDefaultDefinitionParameters( FunctionRetrieveDefaultDefinitionParameters ): - """The parameters needed to retrieve the default function definition for an Azure Machine Learning web service function. + """The parameters needed to retrieve the default function definition for an Azure Machine Learning + web service function. All required parameters must be populated in order to send to Azure. :ivar binding_type: Indicates the function binding type. Required. :vartype binding_type: str :ivar execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning web - service. - :vartype execute_endpoint: str - :ivar udf_type: The function type. Default value is "Scalar". - :vartype udf_type: str - """ - - _validation = { - "binding_type": {"required": True}, - } - - _attribute_map = { - "binding_type": {"key": "bindingType", "type": "str"}, - "execute_endpoint": {"key": "bindingRetrievalProperties.executeEndpoint", "type": "str"}, - "udf_type": {"key": "bindingRetrievalProperties.udfType", "type": "str"}, - } - - def __init__( - self, *, execute_endpoint: Optional[str] = None, udf_type: Optional[Literal["Scalar"]] = None, **kwargs - ): - """ - :keyword execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - web service. - :paramtype execute_endpoint: str - :keyword udf_type: The function type. Default value is "Scalar". 
- :paramtype udf_type: str - """ - super().__init__(**kwargs) - self.binding_type = "Microsoft.MachineLearningServices" # type: str - self.execute_endpoint = execute_endpoint - self.udf_type = udf_type - - -class AzureMachineLearningServiceInputColumn(_serialization.Model): - """Describes an input column for the Azure Machine Learning web service endpoint. - - :ivar name: The name of the input column. - :vartype name: str - :ivar data_type: The (Azure Machine Learning supported) data type of the input column. - :vartype data_type: str - :ivar map_to: The zero based index of the function parameter this input maps to. - :vartype map_to: int - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "data_type": {"key": "dataType", "type": "str"}, - "map_to": {"key": "mapTo", "type": "int"}, - } - - def __init__( - self, *, name: Optional[str] = None, data_type: Optional[str] = None, map_to: Optional[int] = None, **kwargs - ): - """ - :keyword name: The name of the input column. - :paramtype name: str - :keyword data_type: The (Azure Machine Learning supported) data type of the input column. - :paramtype data_type: str - :keyword map_to: The zero based index of the function parameter this input maps to. - :paramtype map_to: int - """ - super().__init__(**kwargs) - self.name = name - self.data_type = data_type - self.map_to = map_to - - -class AzureMachineLearningServiceInputs(_serialization.Model): - """The inputs for the Azure Machine Learning web service endpoint. - - :ivar name: The name of the input. This is the name provided while authoring the endpoint. - :vartype name: str - :ivar column_names: A list of input columns for the Azure Machine Learning web service - endpoint. - :vartype column_names: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn] - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "column_names": {"key": "columnNames", "type": "[AzureMachineLearningServiceInputColumn]"}, - } - - def __init__( - self, - *, - name: Optional[str] = None, - column_names: Optional[List["_models.AzureMachineLearningServiceInputColumn"]] = None, - **kwargs - ): - """ - :keyword name: The name of the input. This is the name provided while authoring the endpoint. - :paramtype name: str - :keyword column_names: A list of input columns for the Azure Machine Learning web service - endpoint. - :paramtype column_names: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningServiceInputColumn] - """ - super().__init__(**kwargs) - self.name = name - self.column_names = column_names - - -class AzureMachineLearningServiceOutputColumn(_serialization.Model): - """Describes an output column for the Azure Machine Learning web service endpoint. - - :ivar name: The name of the output column. - :vartype name: str - :ivar data_type: The (Azure Machine Learning supported) data type of the output column. - :vartype data_type: str - :ivar map_to: The zero based index of the function parameter this input maps to. - :vartype map_to: int - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "data_type": {"key": "dataType", "type": "str"}, - "map_to": {"key": "mapTo", "type": "int"}, - } - - def __init__( - self, *, name: Optional[str] = None, data_type: Optional[str] = None, map_to: Optional[int] = None, **kwargs - ): - """ - :keyword name: The name of the output column. - :paramtype name: str - :keyword data_type: The (Azure Machine Learning supported) data type of the output column. 
- :paramtype data_type: str - :keyword map_to: The zero based index of the function parameter this input maps to. - :paramtype map_to: int - """ - super().__init__(**kwargs) - self.name = name - self.data_type = data_type - self.map_to = map_to - - -class AzureMachineLearningStudioFunctionBinding(FunctionBinding): - """The binding to an Azure Machine Learning Studio. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the function binding type. Required. - :vartype type: str - :ivar endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio. - Find out more here: - https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. - :vartype endpoint: str - :ivar api_key: The API key used to authenticate with Request-Response endpoint. - :vartype api_key: str - :ivar inputs: The inputs for the Azure Machine Learning Studio endpoint. - :vartype inputs: ~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputs - :ivar outputs: A list of outputs from the Azure Machine Learning Studio endpoint execution. - :vartype outputs: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioOutputColumn] - :ivar batch_size: Number between 1 and 10000 describing maximum number of rows for every Azure - ML RRS execute request. Default is 1000. - :vartype batch_size: int - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "endpoint": {"key": "properties.endpoint", "type": "str"}, - "api_key": {"key": "properties.apiKey", "type": "str"}, - "inputs": {"key": "properties.inputs", "type": "AzureMachineLearningStudioInputs"}, - "outputs": {"key": "properties.outputs", "type": "[AzureMachineLearningStudioOutputColumn]"}, - "batch_size": {"key": "properties.batchSize", "type": "int"}, - } - - def __init__( - self, - *, - endpoint: Optional[str] = None, - api_key: Optional[str] = None, - inputs: Optional["_models.AzureMachineLearningStudioInputs"] = None, - outputs: Optional[List["_models.AzureMachineLearningStudioOutputColumn"]] = None, - batch_size: Optional[int] = None, - **kwargs - ): - """ - :keyword endpoint: The Request-Response execute endpoint of the Azure Machine Learning Studio. - Find out more here: - https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. - :paramtype endpoint: str - :keyword api_key: The API key used to authenticate with Request-Response endpoint. - :paramtype api_key: str - :keyword inputs: The inputs for the Azure Machine Learning Studio endpoint. - :paramtype inputs: ~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputs - :keyword outputs: A list of outputs from the Azure Machine Learning Studio endpoint execution. - :paramtype outputs: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioOutputColumn] - :keyword batch_size: Number between 1 and 10000 describing maximum number of rows for every - Azure ML RRS execute request. Default is 1000. 
- :paramtype batch_size: int - """ - super().__init__(**kwargs) - self.type = "Microsoft.MachineLearning/WebService" # type: str - self.endpoint = endpoint - self.api_key = api_key - self.inputs = inputs - self.outputs = outputs - self.batch_size = batch_size - - -class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters( - FunctionRetrieveDefaultDefinitionParameters -): - """The parameters needed to retrieve the default function definition for an Azure Machine Learning Studio function. - - All required parameters must be populated in order to send to Azure. - - :ivar binding_type: Indicates the function binding type. Required. - :vartype binding_type: str - :ivar execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - Studio. Find out more here: + service. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. :vartype execute_endpoint: str :ivar udf_type: The function type. Default value is "Scalar". @@ -1052,24 +804,24 @@ class AzureMachineLearningStudioFunctionRetrieveDefaultDefinitionParameters( } def __init__( - self, *, execute_endpoint: Optional[str] = None, udf_type: Optional[Literal["Scalar"]] = None, **kwargs - ): + self, *, execute_endpoint: Optional[str] = None, udf_type: Optional[Literal["Scalar"]] = None, **kwargs: Any + ) -> None: """ :keyword execute_endpoint: The Request-Response execute endpoint of the Azure Machine Learning - Studio. Find out more here: + web service. Find out more here: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-consume-web-services#request-response-service-rrs. :paramtype execute_endpoint: str :keyword udf_type: The function type. Default value is "Scalar". :paramtype udf_type: str """ super().__init__(**kwargs) - self.binding_type = "Microsoft.MachineLearning/WebService" # type: str + self.binding_type: str = "Microsoft.MachineLearning/WebService" self.execute_endpoint = execute_endpoint self.udf_type = udf_type -class AzureMachineLearningStudioInputColumn(_serialization.Model): - """Describes an input column for the Azure Machine Learning Studio endpoint. +class AzureMachineLearningWebServiceInputColumn(_serialization.Model): + """Describes an input column for the Azure Machine Learning web service endpoint. :ivar name: The name of the input column. :vartype name: str @@ -1088,8 +840,13 @@ class AzureMachineLearningStudioInputColumn(_serialization.Model): } def __init__( - self, *, name: Optional[str] = None, data_type: Optional[str] = None, map_to: Optional[int] = None, **kwargs - ): + self, + *, + name: Optional[str] = None, + data_type: Optional[str] = None, + map_to: Optional[int] = None, + **kwargs: Any + ) -> None: """ :keyword name: The name of the input column. :paramtype name: str @@ -1106,42 +863,44 @@ def __init__( self.map_to = map_to -class AzureMachineLearningStudioInputs(_serialization.Model): - """The inputs for the Azure Machine Learning Studio endpoint. +class AzureMachineLearningWebServiceInputs(_serialization.Model): + """The inputs for the Azure Machine Learning web service endpoint. :ivar name: The name of the input. This is the name provided while authoring the endpoint. :vartype name: str - :ivar column_names: A list of input columns for the Azure Machine Learning Studio endpoint. + :ivar column_names: A list of input columns for the Azure Machine Learning web service + endpoint. 
:vartype column_names: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputColumn] + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceInputColumn] """ _attribute_map = { "name": {"key": "name", "type": "str"}, - "column_names": {"key": "columnNames", "type": "[AzureMachineLearningStudioInputColumn]"}, + "column_names": {"key": "columnNames", "type": "[AzureMachineLearningWebServiceInputColumn]"}, } def __init__( self, *, name: Optional[str] = None, - column_names: Optional[List["_models.AzureMachineLearningStudioInputColumn"]] = None, - **kwargs - ): + column_names: Optional[List["_models.AzureMachineLearningWebServiceInputColumn"]] = None, + **kwargs: Any + ) -> None: """ :keyword name: The name of the input. This is the name provided while authoring the endpoint. :paramtype name: str - :keyword column_names: A list of input columns for the Azure Machine Learning Studio endpoint. + :keyword column_names: A list of input columns for the Azure Machine Learning web service + endpoint. :paramtype column_names: - list[~azure.mgmt.streamanalytics.models.AzureMachineLearningStudioInputColumn] + list[~azure.mgmt.streamanalytics.models.AzureMachineLearningWebServiceInputColumn] """ super().__init__(**kwargs) self.name = name self.column_names = column_names -class AzureMachineLearningStudioOutputColumn(_serialization.Model): - """Describes an output column for the Azure Machine Learning Studio endpoint. +class AzureMachineLearningWebServiceOutputColumn(_serialization.Model): + """Describes an output column for the Azure Machine Learning web service endpoint. :ivar name: The name of the output column. :vartype name: str @@ -1156,7 +915,7 @@ class AzureMachineLearningStudioOutputColumn(_serialization.Model): "data_type": {"key": "dataType", "type": "str"}, } - def __init__(self, *, name: Optional[str] = None, data_type: Optional[str] = None, **kwargs): + def __init__(self, *, name: Optional[str] = None, data_type: Optional[str] = None, **kwargs: Any) -> None: """ :keyword name: The name of the output column. :paramtype name: str @@ -1220,8 +979,8 @@ def __init__( max_batch_count: Optional[float] = None, max_writer_count: Optional[float] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. @@ -1319,8 +1078,8 @@ def __init__( max_batch_count: Optional[float] = None, max_writer_count: Optional[float] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. @@ -1348,7 +1107,7 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) - self.type = "Microsoft.Sql/Server/Database" # type: str + self.type: str = "Microsoft.Sql/Server/Database" self.server = server self.database = database self.user = user @@ -1409,8 +1168,8 @@ def __init__( max_batch_count: Optional[float] = None, max_writer_count: Optional[float] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. 
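# The AzureMachineLearningWebService* rename above also changes the binding
# shape: `inputs` is a single AzureMachineLearningWebServiceInputs object that
# wraps the column list, rather than a bare list of input columns. A
# construction sketch with placeholder endpoint, key, and column values:
from azure.mgmt.streamanalytics import models

binding = models.AzureMachineLearningWebServiceFunctionBinding(
    endpoint="<request-response-execute-endpoint>",
    api_key="<api-key>",
    inputs=models.AzureMachineLearningWebServiceInputs(
        name="input1",
        column_names=[
            models.AzureMachineLearningWebServiceInputColumn(
                name="tweet", data_type="string", map_to=0
            )
        ],
    ),
    outputs=[
        models.AzureMachineLearningWebServiceOutputColumn(name="Sentiment", data_type="string")
    ],
    batch_size=1000,
)
# The discriminator is fixed by the constructor, matching the _subtype_map key.
assert binding.type == "Microsoft.MachineLearning/WebService"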
@@ -1454,8 +1213,7 @@ class ReferenceInputDataSource(_serialization.Model): """Describes an input data source that contains reference data. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - FileReferenceInputDataSource, AzureSqlReferenceInputDataSource, BlobReferenceInputDataSource, - RawReferenceInputDataSource + FileReferenceInputDataSource, AzureSqlReferenceInputDataSource, BlobReferenceInputDataSource All required parameters must be populated in order to send to Azure. @@ -1477,14 +1235,13 @@ class ReferenceInputDataSource(_serialization.Model): "File": "FileReferenceInputDataSource", "Microsoft.Sql/Server/Database": "AzureSqlReferenceInputDataSource", "Microsoft.Storage/Blob": "BlobReferenceInputDataSource", - "Raw": "RawReferenceInputDataSource", } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.type = None # type: Optional[str] + self.type: Optional[str] = None class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): @@ -1507,6 +1264,9 @@ class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): :ivar password: This element is associated with the datasource element. This is the password that will be used to connect to the SQL Database instance. :vartype password: str + :ivar table: This element is associated with the datasource element. The name of the table in + the Azure SQL database.. + :vartype table: str :ivar refresh_type: Indicates the type of data refresh option. Known values are: "Static", "RefreshPeriodicallyWithFull", and "RefreshPeriodicallyWithDelta". :vartype refresh_type: str or ~azure.mgmt.streamanalytics.models.RefreshType @@ -1520,9 +1280,6 @@ class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): is used to fetch incremental changes from the SQL database. To use this option, we recommend using temporal tables in Azure SQL Database. :vartype delta_snapshot_query: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ _validation = { @@ -1535,11 +1292,11 @@ class AzureSqlReferenceInputDataSource(ReferenceInputDataSource): "database": {"key": "properties.database", "type": "str"}, "user": {"key": "properties.user", "type": "str"}, "password": {"key": "properties.password", "type": "str"}, + "table": {"key": "properties.table", "type": "str"}, "refresh_type": {"key": "properties.refreshType", "type": "str"}, "refresh_rate": {"key": "properties.refreshRate", "type": "str"}, "full_snapshot_query": {"key": "properties.fullSnapshotQuery", "type": "str"}, "delta_snapshot_query": {"key": "properties.deltaSnapshotQuery", "type": "str"}, - "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, } def __init__( @@ -1549,13 +1306,13 @@ def __init__( database: Optional[str] = None, user: Optional[str] = None, password: Optional[str] = None, + table: Optional[str] = None, refresh_type: Optional[Union[str, "_models.RefreshType"]] = None, refresh_rate: Optional[str] = None, full_snapshot_query: Optional[str] = None, delta_snapshot_query: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword server: This element is associated with the datasource element. This is the name of the server that contains the database that will be written to. 
@@ -1569,6 +1326,9 @@ def __init__( :keyword password: This element is associated with the datasource element. This is the password that will be used to connect to the SQL Database instance. :paramtype password: str + :keyword table: This element is associated with the datasource element. The name of the table + in the Azure SQL database.. + :paramtype table: str :keyword refresh_type: Indicates the type of data refresh option. Known values are: "Static", "RefreshPeriodicallyWithFull", and "RefreshPeriodicallyWithDelta". :paramtype refresh_type: str or ~azure.mgmt.streamanalytics.models.RefreshType @@ -1582,21 +1342,18 @@ def __init__( query is used to fetch incremental changes from the SQL database. To use this option, we recommend using temporal tables in Azure SQL Database. :paramtype delta_snapshot_query: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) - self.type = "Microsoft.Sql/Server/Database" # type: str + self.type: str = "Microsoft.Sql/Server/Database" self.server = server self.database = database self.user = user self.password = password + self.table = table self.refresh_type = refresh_type self.refresh_rate = refresh_rate self.full_snapshot_query = full_snapshot_query self.delta_snapshot_query = delta_snapshot_query - self.authentication_mode = authentication_mode class AzureSynapseDataSourceProperties(_serialization.Model): @@ -1616,9 +1373,6 @@ class AzureSynapseDataSourceProperties(_serialization.Model): :ivar password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. :vartype password: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ _attribute_map = { @@ -1627,7 +1381,6 @@ class AzureSynapseDataSourceProperties(_serialization.Model): "table": {"key": "table", "type": "str"}, "user": {"key": "user", "type": "str"}, "password": {"key": "password", "type": "str"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, } def __init__( @@ -1638,9 +1391,8 @@ def __init__( table: Optional[str] = None, user: Optional[str] = None, password: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. @@ -1657,9 +1409,6 @@ def __init__( :keyword password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. :paramtype password: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) self.server = server @@ -1667,7 +1416,6 @@ def __init__( self.table = table self.user = user self.password = password - self.authentication_mode = authentication_mode class AzureSynapseOutputDataSource(OutputDataSource): @@ -1692,9 +1440,6 @@ class AzureSynapseOutputDataSource(OutputDataSource): :ivar password: The password that will be used to connect to the Azure SQL database. 
Required on PUT (CreateOrReplace) requests. :vartype password: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ _validation = { @@ -1708,7 +1453,6 @@ class AzureSynapseOutputDataSource(OutputDataSource): "table": {"key": "properties.table", "type": "str"}, "user": {"key": "properties.user", "type": "str"}, "password": {"key": "properties.password", "type": "str"}, - "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, } def __init__( @@ -1719,9 +1463,8 @@ def __init__( table: Optional[str] = None, user: Optional[str] = None, password: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. @@ -1738,18 +1481,14 @@ def __init__( :keyword password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. :paramtype password: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) - self.type = "Microsoft.Sql/Server/DataWarehouse" # type: str + self.type: str = "Microsoft.Sql/Server/DataWarehouse" self.server = server self.database = database self.table = table self.user = user self.password = password - self.authentication_mode = authentication_mode class AzureSynapseOutputDataSourceProperties(AzureSynapseDataSourceProperties): @@ -1769,9 +1508,6 @@ class AzureSynapseOutputDataSourceProperties(AzureSynapseDataSourceProperties): :ivar password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. :vartype password: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ _attribute_map = { @@ -1780,7 +1516,6 @@ class AzureSynapseOutputDataSourceProperties(AzureSynapseDataSourceProperties): "table": {"key": "table", "type": "str"}, "user": {"key": "user", "type": "str"}, "password": {"key": "password", "type": "str"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, } def __init__( @@ -1791,9 +1526,8 @@ def __init__( table: Optional[str] = None, user: Optional[str] = None, password: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT (CreateOrReplace) requests. @@ -1810,19 +1544,8 @@ def __init__( :keyword password: The password that will be used to connect to the Azure SQL database. Required on PUT (CreateOrReplace) requests. :paramtype password: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". 
- :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ - super().__init__( - server=server, - database=database, - table=table, - user=user, - password=password, - authentication_mode=authentication_mode, - **kwargs - ) + super().__init__(server=server, database=database, table=table, user=user, password=password, **kwargs) class AzureTableOutputDataSource(OutputDataSource): @@ -1881,8 +1604,8 @@ def __init__( row_key: Optional[str] = None, columns_to_remove: Optional[List[str]] = None, batch_size: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. @@ -1907,7 +1630,7 @@ def __init__( :paramtype batch_size: int """ super().__init__(**kwargs) - self.type = "Microsoft.Storage/Table" # type: str + self.type: str = "Microsoft.Storage/Table" self.account_name = account_name self.account_key = account_key self.table = table @@ -1963,8 +1686,8 @@ def __init__( date_format: Optional[str] = None, time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. @@ -2032,8 +1755,6 @@ class BlobOutputDataSource(OutputDataSource): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar blob_path_prefix: Blob path prefix. :vartype blob_path_prefix: str - :ivar blob_write_mode: Blob write mode. Known values are: "Append" and "Once". - :vartype blob_write_mode: str or ~azure.mgmt.streamanalytics.models.BlobWriteMode """ _validation = { @@ -2049,7 +1770,6 @@ class BlobOutputDataSource(OutputDataSource): "time_format": {"key": "properties.timeFormat", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, "blob_path_prefix": {"key": "properties.blobPathPrefix", "type": "str"}, - "blob_write_mode": {"key": "properties.blobWriteMode", "type": "str"}, } def __init__( @@ -2062,9 +1782,8 @@ def __init__( time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", blob_path_prefix: Optional[str] = None, - blob_write_mode: Optional[Union[str, "_models.BlobWriteMode"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. @@ -2091,11 +1810,9 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword blob_path_prefix: Blob path prefix. :paramtype blob_path_prefix: str - :keyword blob_write_mode: Blob write mode. Known values are: "Append" and "Once". 
- :paramtype blob_write_mode: str or ~azure.mgmt.streamanalytics.models.BlobWriteMode """ super().__init__(**kwargs) - self.type = "Microsoft.Storage/Blob" # type: str + self.type: str = "Microsoft.Storage/Blob" self.storage_accounts = storage_accounts self.container = container self.path_pattern = path_pattern @@ -2103,7 +1820,6 @@ def __init__( self.time_format = time_format self.authentication_mode = authentication_mode self.blob_path_prefix = blob_path_prefix - self.blob_write_mode = blob_write_mode class BlobOutputDataSourceProperties(BlobDataSourceProperties): @@ -2134,8 +1850,6 @@ class BlobOutputDataSourceProperties(BlobDataSourceProperties): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar blob_path_prefix: Blob path prefix. :vartype blob_path_prefix: str - :ivar blob_write_mode: Blob write mode. Known values are: "Append" and "Once". - :vartype blob_write_mode: str or ~azure.mgmt.streamanalytics.models.BlobWriteMode """ _attribute_map = { @@ -2146,7 +1860,6 @@ class BlobOutputDataSourceProperties(BlobDataSourceProperties): "time_format": {"key": "timeFormat", "type": "str"}, "authentication_mode": {"key": "authenticationMode", "type": "str"}, "blob_path_prefix": {"key": "blobPathPrefix", "type": "str"}, - "blob_write_mode": {"key": "blobWriteMode", "type": "str"}, } def __init__( @@ -2159,9 +1872,8 @@ def __init__( time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", blob_path_prefix: Optional[str] = None, - blob_write_mode: Optional[Union[str, "_models.BlobWriteMode"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. @@ -2188,8 +1900,6 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword blob_path_prefix: Blob path prefix. :paramtype blob_path_prefix: str - :keyword blob_write_mode: Blob write mode. Known values are: "Append" and "Once". - :paramtype blob_write_mode: str or ~azure.mgmt.streamanalytics.models.BlobWriteMode """ super().__init__( storage_accounts=storage_accounts, @@ -2201,10 +1911,9 @@ def __init__( **kwargs ) self.blob_path_prefix = blob_path_prefix - self.blob_write_mode = blob_write_mode -class BlobReferenceInputDataSource(ReferenceInputDataSource): # pylint: disable=too-many-instance-attributes +class BlobReferenceInputDataSource(ReferenceInputDataSource): """Describes a blob input data source that contains reference data. All required parameters must be populated in order to send to Azure. @@ -2235,17 +1944,6 @@ class BlobReferenceInputDataSource(ReferenceInputDataSource): # pylint: disable :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and "ConnectionString". :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - :ivar blob_name: The name of the blob input. - :vartype blob_name: str - :ivar delta_path_pattern: The path pattern of the delta snapshot. - :vartype delta_path_pattern: str - :ivar source_partition_count: The partition count of the blob input data source. Range 1 - 256. - :vartype source_partition_count: int - :ivar full_snapshot_refresh_rate: The refresh interval of the blob input data source. - :vartype full_snapshot_refresh_rate: str - :ivar delta_snapshot_refresh_rate: The interval that the user generates a delta snapshot of - this reference blob input data source. 
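With `blob_write_mode` removed from the blob output surface above, a blob output carries only the storage, path, and format settings. A sketch assuming StorageAccount exposes `account_name`/`account_key` as defined elsewhere in this package (all values are placeholders):

from azure.mgmt.streamanalytics.models import BlobOutputDataSource, StorageAccount

# Connection-string auth is the default; "Msi" and "UserToken" remain known values.
blob_output = BlobOutputDataSource(
    storage_accounts=[StorageAccount(account_name="examplestorage", account_key="<key>")],
    container="output-container",
    path_pattern="{date}/{time}",
    date_format="yyyy/MM/dd",
    time_format="HH",
    authentication_mode="ConnectionString",
)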
- :vartype delta_snapshot_refresh_rate: str """ _validation = { @@ -2260,11 +1958,6 @@ class BlobReferenceInputDataSource(ReferenceInputDataSource): # pylint: disable "date_format": {"key": "properties.dateFormat", "type": "str"}, "time_format": {"key": "properties.timeFormat", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, - "blob_name": {"key": "properties.blobName", "type": "str"}, - "delta_path_pattern": {"key": "properties.deltaPathPattern", "type": "str"}, - "source_partition_count": {"key": "properties.sourcePartitionCount", "type": "int"}, - "full_snapshot_refresh_rate": {"key": "properties.fullSnapshotRefreshRate", "type": "str"}, - "delta_snapshot_refresh_rate": {"key": "properties.deltaSnapshotRefreshRate", "type": "str"}, } def __init__( @@ -2276,13 +1969,8 @@ def __init__( date_format: Optional[str] = None, time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - blob_name: Optional[str] = None, - delta_path_pattern: Optional[str] = None, - source_partition_count: Optional[int] = None, - full_snapshot_refresh_rate: Optional[str] = None, - delta_snapshot_refresh_rate: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. @@ -2307,35 +1995,18 @@ def __init__( :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and "ConnectionString". :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - :keyword blob_name: The name of the blob input. - :paramtype blob_name: str - :keyword delta_path_pattern: The path pattern of the delta snapshot. - :paramtype delta_path_pattern: str - :keyword source_partition_count: The partition count of the blob input data source. Range 1 - - 256. - :paramtype source_partition_count: int - :keyword full_snapshot_refresh_rate: The refresh interval of the blob input data source. - :paramtype full_snapshot_refresh_rate: str - :keyword delta_snapshot_refresh_rate: The interval that the user generates a delta snapshot of - this reference blob input data source. - :paramtype delta_snapshot_refresh_rate: str """ super().__init__(**kwargs) - self.type = "Microsoft.Storage/Blob" # type: str + self.type: str = "Microsoft.Storage/Blob" self.storage_accounts = storage_accounts self.container = container self.path_pattern = path_pattern self.date_format = date_format self.time_format = time_format self.authentication_mode = authentication_mode - self.blob_name = blob_name - self.delta_path_pattern = delta_path_pattern - self.source_partition_count = source_partition_count - self.full_snapshot_refresh_rate = full_snapshot_refresh_rate - self.delta_snapshot_refresh_rate = delta_snapshot_refresh_rate -class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): # pylint: disable=too-many-instance-attributes +class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): """The properties that are associated with a blob input containing reference data. :ivar storage_accounts: A list of one or more Azure Storage accounts. Required on PUT @@ -2361,17 +2032,6 @@ class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): # pylin :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and "ConnectionString". 
:vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - :ivar blob_name: The name of the blob input. - :vartype blob_name: str - :ivar delta_path_pattern: The path pattern of the delta snapshot. - :vartype delta_path_pattern: str - :ivar source_partition_count: The partition count of the blob input data source. Range 1 - 256. - :vartype source_partition_count: int - :ivar full_snapshot_refresh_rate: The refresh interval of the blob input data source. - :vartype full_snapshot_refresh_rate: str - :ivar delta_snapshot_refresh_rate: The interval that the user generates a delta snapshot of - this reference blob input data source. - :vartype delta_snapshot_refresh_rate: str """ _attribute_map = { @@ -2381,11 +2041,6 @@ class BlobReferenceInputDataSourceProperties(BlobDataSourceProperties): # pylin "date_format": {"key": "dateFormat", "type": "str"}, "time_format": {"key": "timeFormat", "type": "str"}, "authentication_mode": {"key": "authenticationMode", "type": "str"}, - "blob_name": {"key": "blobName", "type": "str"}, - "delta_path_pattern": {"key": "deltaPathPattern", "type": "str"}, - "source_partition_count": {"key": "sourcePartitionCount", "type": "int"}, - "full_snapshot_refresh_rate": {"key": "fullSnapshotRefreshRate", "type": "str"}, - "delta_snapshot_refresh_rate": {"key": "deltaSnapshotRefreshRate", "type": "str"}, } def __init__( @@ -2397,13 +2052,8 @@ def __init__( date_format: Optional[str] = None, time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - blob_name: Optional[str] = None, - delta_path_pattern: Optional[str] = None, - source_partition_count: Optional[int] = None, - full_snapshot_refresh_rate: Optional[str] = None, - delta_snapshot_refresh_rate: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. @@ -2428,18 +2078,6 @@ def __init__( :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and "ConnectionString". :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - :keyword blob_name: The name of the blob input. - :paramtype blob_name: str - :keyword delta_path_pattern: The path pattern of the delta snapshot. - :paramtype delta_path_pattern: str - :keyword source_partition_count: The partition count of the blob input data source. Range 1 - - 256. - :paramtype source_partition_count: int - :keyword full_snapshot_refresh_rate: The refresh interval of the blob input data source. - :paramtype full_snapshot_refresh_rate: str - :keyword delta_snapshot_refresh_rate: The interval that the user generates a delta snapshot of - this reference blob input data source. - :paramtype delta_snapshot_refresh_rate: str """ super().__init__( storage_accounts=storage_accounts, @@ -2450,11 +2088,6 @@ def __init__( authentication_mode=authentication_mode, **kwargs ) - self.blob_name = blob_name - self.delta_path_pattern = delta_path_pattern - self.source_partition_count = source_partition_count - self.full_snapshot_refresh_rate = full_snapshot_refresh_rate - self.delta_snapshot_refresh_rate = delta_snapshot_refresh_rate class StreamInputDataSource(_serialization.Model): @@ -2462,8 +2095,7 @@ class StreamInputDataSource(_serialization.Model): You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: GatewayMessageBusStreamInputDataSource, IoTHubStreamInputDataSource, - EventGridStreamInputDataSource, EventHubV2StreamInputDataSource, EventHubStreamInputDataSource, - BlobStreamInputDataSource, RawStreamInputDataSource + EventHubV2StreamInputDataSource, EventHubStreamInputDataSource, BlobStreamInputDataSource All required parameters must be populated in order to send to Azure. @@ -2484,18 +2116,16 @@ class StreamInputDataSource(_serialization.Model): "type": { "GatewayMessageBus": "GatewayMessageBusStreamInputDataSource", "Microsoft.Devices/IotHubs": "IoTHubStreamInputDataSource", - "Microsoft.EventGrid/EventSubscriptions": "EventGridStreamInputDataSource", "Microsoft.EventHub/EventHub": "EventHubV2StreamInputDataSource", "Microsoft.ServiceBus/EventHub": "EventHubStreamInputDataSource", "Microsoft.Storage/Blob": "BlobStreamInputDataSource", - "Raw": "RawStreamInputDataSource", } } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) - self.type = None # type: Optional[str] + self.type: Optional[str] = None class BlobStreamInputDataSource(StreamInputDataSource): @@ -2559,8 +2189,8 @@ def __init__( time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", source_partition_count: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. @@ -2590,7 +2220,7 @@ def __init__( :paramtype source_partition_count: int """ super().__init__(**kwargs) - self.type = "Microsoft.Storage/Blob" # type: str + self.type: str = "Microsoft.Storage/Blob" self.storage_accounts = storage_accounts self.container = container self.path_pattern = path_pattern @@ -2651,8 +2281,8 @@ def __init__( time_format: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", source_partition_count: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT (CreateOrReplace) requests. @@ -2720,7 +2350,7 @@ class Resource(_serialization.Model): "type": {"key": "type", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -2761,7 +2391,7 @@ class TrackedResource(Resource): "location": {"key": "location", "type": "str"}, } - def __init__(self, *, tags: Optional[Dict[str, str]] = None, location: Optional[str] = None, **kwargs): + def __init__(self, *, tags: Optional[Dict[str, str]] = None, location: Optional[str] = None, **kwargs: Any) -> None: """ :keyword tags: Resource tags. :paramtype tags: dict[str, str] @@ -2773,7 +2403,7 @@ def __init__(self, *, tags: Optional[Dict[str, str]] = None, location: Optional[ self.location = location -class Cluster(TrackedResource): +class Cluster(TrackedResource): # pylint: disable=too-many-instance-attributes """A Stream Analytics Cluster object. Variables are only populated by the server, and will be ignored when sending a request. @@ -2797,26 +2427,47 @@ class Cluster(TrackedResource): detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :ivar properties: The properties associated with a Stream Analytics cluster. 
- :vartype properties: ~azure.mgmt.streamanalytics.models.ClusterProperties - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "etag": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, + :ivar created_date: The date this cluster was created. + :vartype created_date: ~datetime.datetime + :ivar cluster_id: Unique identifier for the cluster. + :vartype cluster_id: str + :ivar provisioning_state: The status of the cluster provisioning. The three terminal states + are: Succeeded, Failed and Canceled. Known values are: "Succeeded", "Failed", "Canceled", and + "InProgress". + :vartype provisioning_state: str or ~azure.mgmt.streamanalytics.models.ClusterProvisioningState + :ivar capacity_allocated: Represents the number of streaming units currently being used on the + cluster. + :vartype capacity_allocated: int + :ivar capacity_assigned: Represents the sum of the SUs of all streaming jobs associated with + the cluster. If all of the jobs were running, this would be the capacity allocated. + :vartype capacity_assigned: int + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "created_date": {"readonly": True}, + "cluster_id": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "capacity_allocated": {"readonly": True}, + "capacity_assigned": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "location": {"key": "location", "type": "str"}, "sku": {"key": "sku", "type": "ClusterSku"}, "etag": {"key": "etag", "type": "str"}, - "properties": {"key": "properties", "type": "ClusterProperties"}, + "created_date": {"key": "properties.createdDate", "type": "iso-8601"}, + "cluster_id": {"key": "properties.clusterId", "type": "str"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "capacity_allocated": {"key": "properties.capacityAllocated", "type": "int"}, + "capacity_assigned": {"key": "properties.capacityAssigned", "type": "int"}, } def __init__( @@ -2825,9 +2476,8 @@ def __init__( tags: Optional[Dict[str, str]] = None, location: Optional[str] = None, sku: Optional["_models.ClusterSku"] = None, - properties: Optional["_models.ClusterProperties"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword tags: Resource tags. :paramtype tags: dict[str, str] @@ -2836,13 +2486,15 @@ def __init__( :keyword sku: The SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT (CreateOrUpdate) requests. :paramtype sku: ~azure.mgmt.streamanalytics.models.ClusterSku - :keyword properties: The properties associated with a Stream Analytics cluster. 
- :paramtype properties: ~azure.mgmt.streamanalytics.models.ClusterProperties """ super().__init__(tags=tags, location=location, **kwargs) self.sku = sku self.etag = None - self.properties = properties + self.created_date = None + self.cluster_id = None + self.provisioning_state = None + self.capacity_allocated = None + self.capacity_assigned = None class ClusterInfo(_serialization.Model): @@ -2856,7 +2508,7 @@ class ClusterInfo(_serialization.Model): "id": {"key": "id", "type": "str"}, } - def __init__(self, *, id: Optional[str] = None, **kwargs): # pylint: disable=redefined-builtin + def __init__(self, *, id: Optional[str] = None, **kwargs: Any) -> None: # pylint: disable=redefined-builtin """ :keyword id: The resource id of cluster. :paramtype id: str @@ -2892,7 +2544,7 @@ class ClusterJob(_serialization.Model): "job_state": {"key": "jobState", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.id = None @@ -2921,7 +2573,7 @@ class ClusterJobListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None @@ -2949,62 +2601,16 @@ class ClusterListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None self.next_link = None -class ClusterProperties(_serialization.Model): - """The properties associated with a Stream Analytics cluster. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar created_date: The date this cluster was created. - :vartype created_date: ~datetime.datetime - :ivar cluster_id: Unique identifier for the cluster. - :vartype cluster_id: str - :ivar provisioning_state: The status of the cluster provisioning. The three terminal states - are: Succeeded, Failed and Canceled. Known values are: "Succeeded", "Failed", "Canceled", and - "InProgress". - :vartype provisioning_state: str or ~azure.mgmt.streamanalytics.models.ClusterProvisioningState - :ivar capacity_allocated: Represents the number of streaming units currently being used on the - cluster. - :vartype capacity_allocated: int - :ivar capacity_assigned: Represents the sum of the SUs of all streaming jobs associated with - the cluster. If all of the jobs were running, this would be the capacity allocated. - :vartype capacity_assigned: int - """ - - _validation = { - "created_date": {"readonly": True}, - "cluster_id": {"readonly": True}, - "provisioning_state": {"readonly": True}, - "capacity_allocated": {"readonly": True}, - "capacity_assigned": {"readonly": True}, - } - - _attribute_map = { - "created_date": {"key": "createdDate", "type": "iso-8601"}, - "cluster_id": {"key": "clusterId", "type": "str"}, - "provisioning_state": {"key": "provisioningState", "type": "str"}, - "capacity_allocated": {"key": "capacityAllocated", "type": "int"}, - "capacity_assigned": {"key": "capacityAssigned", "type": "int"}, - } - - def __init__(self, **kwargs): - """ """ - super().__init__(**kwargs) - self.created_date = None - self.cluster_id = None - self.provisioning_state = None - self.capacity_allocated = None - self.capacity_assigned = None - - class ClusterSku(_serialization.Model): - """The SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT (CreateOrUpdate) requests. 
+ """The SKU of the cluster. This determines the size/capacity of the cluster. Required on PUT + (CreateOrUpdate) requests. :ivar name: Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. "Default" @@ -3025,8 +2631,12 @@ class ClusterSku(_serialization.Model): } def __init__( - self, *, name: Optional[Union[str, "_models.ClusterSkuName"]] = None, capacity: Optional[int] = None, **kwargs - ): + self, + *, + name: Optional[Union[str, "_models.ClusterSkuName"]] = None, + capacity: Optional[int] = None, + **kwargs: Any + ) -> None: """ :keyword name: Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests. "Default" @@ -3041,68 +2651,6 @@ def __init__( self.capacity = capacity -class CompileQuery(_serialization.Model): - """The query compilation object which defines the input, output, and transformation for the query compilation. - - All required parameters must be populated in order to send to Azure. - - :ivar query: The query to compile. Required. - :vartype query: str - :ivar inputs: The inputs for the query compilation. - :vartype inputs: list[~azure.mgmt.streamanalytics.models.QueryInput] - :ivar functions: The functions for the query compilation. - :vartype functions: list[~azure.mgmt.streamanalytics.models.QueryFunction] - :ivar job_type: Describes the type of the job. Valid values are ``Cloud`` and 'Edge'. Required. - Known values are: "Cloud" and "Edge". - :vartype job_type: str or ~azure.mgmt.streamanalytics.models.JobType - :ivar compatibility_level: The query to compile. Known values are: "1.0" and "1.2". - :vartype compatibility_level: str or ~azure.mgmt.streamanalytics.models.CompatibilityLevel - """ - - _validation = { - "query": {"required": True}, - "job_type": {"required": True}, - } - - _attribute_map = { - "query": {"key": "query", "type": "str"}, - "inputs": {"key": "inputs", "type": "[QueryInput]"}, - "functions": {"key": "functions", "type": "[QueryFunction]"}, - "job_type": {"key": "jobType", "type": "str"}, - "compatibility_level": {"key": "compatibilityLevel", "type": "str"}, - } - - def __init__( - self, - *, - query: str, - job_type: Union[str, "_models.JobType"], - inputs: Optional[List["_models.QueryInput"]] = None, - functions: Optional[List["_models.QueryFunction"]] = None, - compatibility_level: Optional[Union[str, "_models.CompatibilityLevel"]] = None, - **kwargs - ): - """ - :keyword query: The query to compile. Required. - :paramtype query: str - :keyword inputs: The inputs for the query compilation. - :paramtype inputs: list[~azure.mgmt.streamanalytics.models.QueryInput] - :keyword functions: The functions for the query compilation. - :paramtype functions: list[~azure.mgmt.streamanalytics.models.QueryFunction] - :keyword job_type: Describes the type of the job. Valid values are ``Cloud`` and 'Edge'. - Required. Known values are: "Cloud" and "Edge". - :paramtype job_type: str or ~azure.mgmt.streamanalytics.models.JobType - :keyword compatibility_level: The query to compile. Known values are: "1.0" and "1.2". - :paramtype compatibility_level: str or ~azure.mgmt.streamanalytics.models.CompatibilityLevel - """ - super().__init__(**kwargs) - self.query = query - self.inputs = inputs - self.functions = functions - self.job_type = job_type - self.compatibility_level = compatibility_level - - class Compression(_serialization.Model): """Describes how input data is compressed. 
@@ -3121,7 +2669,7 @@ class Compression(_serialization.Model): "type": {"key": "type", "type": "str"}, } - def __init__(self, *, type: Union[str, "_models.CompressionType"] = "None", **kwargs): + def __init__(self, *, type: Union[str, "_models.CompressionType"] = "None", **kwargs: Any) -> None: """ :keyword type: Indicates the type of compression that the input uses. Required on PUT (CreateOrReplace) requests. Known values are: "None", "GZip", and "Deflate". @@ -3131,108 +2679,14 @@ def __init__(self, *, type: Union[str, "_models.CompressionType"] = "None", **kw self.type = type -class CSharpFunctionBinding(FunctionBinding): - """The binding to a CSharp function. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the function binding type. Required. - :vartype type: str - :ivar dll_path: The Csharp code containing a single function definition. - :vartype dll_path: str - :ivar class_property: The Csharp code containing a single function definition. - :vartype class_property: str - :ivar method: The Csharp code containing a single function definition. - :vartype method: str - :ivar update_mode: Refresh modes for Stream Analytics functions. Known values are: "Static" and - "Refreshable". - :vartype update_mode: str or ~azure.mgmt.streamanalytics.models.UpdateMode - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "dll_path": {"key": "properties.dllPath", "type": "str"}, - "class_property": {"key": "properties.class", "type": "str"}, - "method": {"key": "properties.method", "type": "str"}, - "update_mode": {"key": "properties.updateMode", "type": "str"}, - } - - def __init__( - self, - *, - dll_path: Optional[str] = None, - class_property: Optional[str] = None, - method: Optional[str] = None, - update_mode: Optional[Union[str, "_models.UpdateMode"]] = None, - **kwargs - ): - """ - :keyword dll_path: The Csharp code containing a single function definition. - :paramtype dll_path: str - :keyword class_property: The Csharp code containing a single function definition. - :paramtype class_property: str - :keyword method: The Csharp code containing a single function definition. - :paramtype method: str - :keyword update_mode: Refresh modes for Stream Analytics functions. Known values are: "Static" - and "Refreshable". - :paramtype update_mode: str or ~azure.mgmt.streamanalytics.models.UpdateMode - """ - super().__init__(**kwargs) - self.type = "Microsoft.StreamAnalytics/CLRUdf" # type: str - self.dll_path = dll_path - self.class_property = class_property - self.method = method - self.update_mode = update_mode - - -class CSharpFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefaultDefinitionParameters): - """The parameters needed to retrieve the default function definition for a CSharp function. - - All required parameters must be populated in order to send to Azure. - - :ivar binding_type: Indicates the function binding type. Required. - :vartype binding_type: str - :ivar script: The CSharp code containing a single function definition. - :vartype script: str - :ivar udf_type: The function type. Default value is "Scalar". 
- :vartype udf_type: str - """ - - _validation = { - "binding_type": {"required": True}, - } - - _attribute_map = { - "binding_type": {"key": "bindingType", "type": "str"}, - "script": {"key": "bindingRetrievalProperties.script", "type": "str"}, - "udf_type": {"key": "bindingRetrievalProperties.udfType", "type": "str"}, - } - - def __init__(self, *, script: Optional[str] = None, udf_type: Optional[Literal["Scalar"]] = None, **kwargs): - """ - :keyword script: The CSharp code containing a single function definition. - :paramtype script: str - :keyword udf_type: The function type. Default value is "Scalar". - :paramtype udf_type: str - """ - super().__init__(**kwargs) - self.binding_type = "Microsoft.StreamAnalytics/CLRUdf" # type: str - self.script = script - self.udf_type = udf_type - - class CsvSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in CSV format. + """Describes how data from an input is serialized or how data is serialized when written to an + output in CSV format. All required parameters must be populated in order to send to Azure. :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", and - "Parquet". + (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", and "Parquet". :vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType :ivar field_delimiter: Specifies the delimiter that will be used to separate comma-separated value (CSV) records. See @@ -3261,8 +2715,8 @@ def __init__( *, field_delimiter: Optional[str] = None, encoding: Optional[Union[str, "_models.Encoding"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword field_delimiter: Specifies the delimiter that will be used to separate comma-separated value (CSV) records. See @@ -3276,51 +2730,11 @@ def __init__( :paramtype encoding: str or ~azure.mgmt.streamanalytics.models.Encoding """ super().__init__(**kwargs) - self.type = "Csv" # type: str + self.type: str = "Csv" self.field_delimiter = field_delimiter self.encoding = encoding -class CustomClrSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in custom format. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", and - "Parquet". - :vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType - :ivar serialization_dll_path: The serialization library path. - :vartype serialization_dll_path: str - :ivar serialization_class_name: The serialization class name. - :vartype serialization_class_name: str - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "serialization_dll_path": {"key": "properties.serializationDllPath", "type": "str"}, - "serialization_class_name": {"key": "properties.serializationClassName", "type": "str"}, - } - - def __init__( - self, *, serialization_dll_path: Optional[str] = None, serialization_class_name: Optional[str] = None, **kwargs - ): - """ - :keyword serialization_dll_path: The serialization library path. 
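With the CustomClr serialization removed above, the serialization surface is down to Csv, Avro, Json, and Parquet. A minimal sketch of CsvSerialization using the docstring's known values (the delimiter choice is illustrative):

from azure.mgmt.streamanalytics.models import CsvSerialization

# `type` is fixed to "Csv" by the model; only delimiter and encoding are configurable.
csv_serialization = CsvSerialization(
    field_delimiter=",",
    encoding="UTF8",
)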
- :paramtype serialization_dll_path: str - :keyword serialization_class_name: The serialization class name. - :paramtype serialization_class_name: str - """ - super().__init__(**kwargs) - self.type = "CustomClr" # type: str - self.serialization_dll_path = serialization_dll_path - self.serialization_class_name = serialization_class_name - - class DiagnosticCondition(_serialization.Model): """Condition applicable to the resource, or to the job overall, that warrant customer attention. @@ -3348,7 +2762,7 @@ class DiagnosticCondition(_serialization.Model): "message": {"key": "message", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.since = None @@ -3357,7 +2771,8 @@ def __init__(self, **kwargs): class Diagnostics(_serialization.Model): - """Describes conditions applicable to the Input, Output, or the job overall, that warrant customer attention. + """Describes conditions applicable to the Input, Output, or the job overall, that warrant customer + attention. Variables are only populated by the server, and will be ignored when sending a request. @@ -3374,7 +2789,7 @@ class Diagnostics(_serialization.Model): "conditions": {"key": "conditions", "type": "[DiagnosticCondition]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.conditions = None @@ -3410,9 +2825,6 @@ class DocumentDbOutputDataSource(OutputDataSource): :ivar document_id: The name of the field in output events used to specify the primary key which insert or update operations are based on. :vartype document_id: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ _validation = { @@ -3427,7 +2839,6 @@ class DocumentDbOutputDataSource(OutputDataSource): "collection_name_pattern": {"key": "properties.collectionNamePattern", "type": "str"}, "partition_key": {"key": "properties.partitionKey", "type": "str"}, "document_id": {"key": "properties.documentId", "type": "str"}, - "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, } def __init__( @@ -3439,9 +2850,8 @@ def __init__( collection_name_pattern: Optional[str] = None, partition_key: Optional[str] = None, document_id: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword account_id: The DocumentDB account name or ID. Required on PUT (CreateOrReplace) requests. @@ -3465,19 +2875,15 @@ def __init__( :keyword document_id: The name of the field in output events used to specify the primary key which insert or update operations are based on. :paramtype document_id: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". 
- :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) - self.type = "Microsoft.Storage/DocumentDB" # type: str + self.type: str = "Microsoft.Storage/DocumentDB" self.account_id = account_id self.account_key = account_key self.database = database self.collection_name_pattern = collection_name_pattern self.partition_key = partition_key self.document_id = document_id - self.authentication_mode = authentication_mode class Error(_serialization.Model): @@ -3491,7 +2897,7 @@ class Error(_serialization.Model): "error": {"key": "error", "type": "ErrorError"}, } - def __init__(self, *, error: Optional["_models.ErrorError"] = None, **kwargs): + def __init__(self, *, error: Optional["_models.ErrorError"] = None, **kwargs: Any) -> None: """ :keyword error: Error definition properties. :paramtype error: ~azure.mgmt.streamanalytics.models.ErrorError @@ -3518,8 +2924,8 @@ class ErrorDetails(_serialization.Model): } def __init__( - self, *, code: Optional[str] = None, target: Optional[str] = None, message: Optional[str] = None, **kwargs - ): + self, *, code: Optional[str] = None, target: Optional[str] = None, message: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword code: Error code. :paramtype code: str @@ -3561,8 +2967,8 @@ def __init__( message: Optional[str] = None, target: Optional[str] = None, details: Optional[List["_models.ErrorDetails"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword code: Error code. :paramtype code: str @@ -3601,78 +3007,16 @@ class ErrorResponse(_serialization.Model): "message": {"key": "message", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.code = None self.message = None -class EventGridStreamInputDataSource(StreamInputDataSource): - """Describes an event grid input data source that contains stream data. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of input data source containing stream data. Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str - :ivar subscriber: Subscribers for the Event Grid. Currently only EventHub Subscriber is - supported. - :vartype subscriber: ~azure.mgmt.streamanalytics.models.EventHubV2StreamInputDataSource - :ivar schema: Indicates the Event Grid schema type. Known values are: "EventGridEventSchema" - and "CloudEventSchema". - :vartype schema: str or ~azure.mgmt.streamanalytics.models.EventGridEventSchemaType - :ivar storage_accounts: A list of one or more Azure Storage accounts. Required on PUT - (CreateOrReplace) requests. - :vartype storage_accounts: list[~azure.mgmt.streamanalytics.models.StorageAccount] - :ivar event_types: List of Event Types that are supported by the Event Grid adapter. 
- :vartype event_types: list[str] - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "subscriber": {"key": "properties.subscriber", "type": "EventHubV2StreamInputDataSource"}, - "schema": {"key": "properties.schema", "type": "str"}, - "storage_accounts": {"key": "properties.storageAccounts", "type": "[StorageAccount]"}, - "event_types": {"key": "properties.eventTypes", "type": "[str]"}, - } - - def __init__( - self, - *, - subscriber: Optional["_models.EventHubV2StreamInputDataSource"] = None, - schema: Optional[Union[str, "_models.EventGridEventSchemaType"]] = None, - storage_accounts: Optional[List["_models.StorageAccount"]] = None, - event_types: Optional[List[str]] = None, - **kwargs - ): - """ - :keyword subscriber: Subscribers for the Event Grid. Currently only EventHub Subscriber is - supported. - :paramtype subscriber: ~azure.mgmt.streamanalytics.models.EventHubV2StreamInputDataSource - :keyword schema: Indicates the Event Grid schema type. Known values are: "EventGridEventSchema" - and "CloudEventSchema". - :paramtype schema: str or ~azure.mgmt.streamanalytics.models.EventGridEventSchemaType - :keyword storage_accounts: A list of one or more Azure Storage accounts. Required on PUT - (CreateOrReplace) requests. - :paramtype storage_accounts: list[~azure.mgmt.streamanalytics.models.StorageAccount] - :keyword event_types: List of Event Types that are supported by the Event Grid adapter. - :paramtype event_types: list[str] - """ - super().__init__(**kwargs) - self.type = "Microsoft.EventGrid/EventSubscriptions" # type: str - self.subscriber = subscriber - self.schema = schema - self.storage_accounts = storage_accounts - self.event_types = event_types - - class ServiceBusDataSourceProperties(_serialization.Model): - """The common properties that are associated with Service Bus data sources (Queues, Topics, Event Hubs, etc.). + """The common properties that are associated with Service Bus data sources (Queues, Topics, Event + Hubs, etc.). :ivar service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -3702,8 +3046,8 @@ def __init__( shared_access_policy_name: Optional[str] = None, shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -3742,8 +3086,6 @@ class EventHubDataSourceProperties(ServiceBusDataSourceProperties): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. 
- :vartype partition_count: int """ _attribute_map = { @@ -3752,7 +3094,6 @@ class EventHubDataSourceProperties(ServiceBusDataSourceProperties): "shared_access_policy_key": {"key": "sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "authenticationMode", "type": "str"}, "event_hub_name": {"key": "eventHubName", "type": "str"}, - "partition_count": {"key": "partitionCount", "type": "int"}, } def __init__( @@ -3763,9 +3104,8 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -3781,8 +3121,6 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int """ super().__init__( service_bus_namespace=service_bus_namespace, @@ -3792,7 +3130,6 @@ def __init__( **kwargs ) self.event_hub_name = event_hub_name - self.partition_count = partition_count class EventHubOutputDataSource(OutputDataSource): @@ -3817,8 +3154,6 @@ class EventHubOutputDataSource(OutputDataSource): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. - :vartype partition_count: int :ivar partition_key: The key/column that is used to determine to which partition to send event data. :vartype partition_key: str @@ -3837,7 +3172,6 @@ class EventHubOutputDataSource(OutputDataSource): "shared_access_policy_key": {"key": "properties.sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, "event_hub_name": {"key": "properties.eventHubName", "type": "str"}, - "partition_count": {"key": "properties.partitionCount", "type": "int"}, "partition_key": {"key": "properties.partitionKey", "type": "str"}, "property_columns": {"key": "properties.propertyColumns", "type": "[str]"}, } @@ -3850,11 +3184,10 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, partition_key: Optional[str] = None, property_columns: Optional[List[str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -3870,8 +3203,6 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. 
- :paramtype partition_count: int :keyword partition_key: The key/column that is used to determine to which partition to send event data. :paramtype partition_key: str @@ -3879,13 +3210,12 @@ def __init__( :paramtype property_columns: list[str] """ super().__init__(**kwargs) - self.type = "Microsoft.ServiceBus/EventHub" # type: str + self.type: str = "Microsoft.ServiceBus/EventHub" self.service_bus_namespace = service_bus_namespace self.shared_access_policy_name = shared_access_policy_name self.shared_access_policy_key = shared_access_policy_key self.authentication_mode = authentication_mode self.event_hub_name = event_hub_name - self.partition_count = partition_count self.partition_key = partition_key self.property_columns = property_columns @@ -3907,8 +3237,6 @@ class EventHubOutputDataSourceProperties(EventHubDataSourceProperties): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. - :vartype partition_count: int :ivar partition_key: The key/column that is used to determine to which partition to send event data. :vartype partition_key: str @@ -3922,7 +3250,6 @@ class EventHubOutputDataSourceProperties(EventHubDataSourceProperties): "shared_access_policy_key": {"key": "sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "authenticationMode", "type": "str"}, "event_hub_name": {"key": "eventHubName", "type": "str"}, - "partition_count": {"key": "partitionCount", "type": "int"}, "partition_key": {"key": "partitionKey", "type": "str"}, "property_columns": {"key": "propertyColumns", "type": "[str]"}, } @@ -3935,11 +3262,10 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, partition_key: Optional[str] = None, property_columns: Optional[List[str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -3955,8 +3281,6 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int :keyword partition_key: The key/column that is used to determine to which partition to send event data. :paramtype partition_key: str @@ -3969,7 +3293,6 @@ def __init__( shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, event_hub_name=event_hub_name, - partition_count=partition_count, **kwargs ) self.partition_key = partition_key @@ -3998,16 +3321,11 @@ class EventHubStreamInputDataSource(StreamInputDataSource): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. 
- :vartype partition_count: int :ivar consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :vartype consumer_group_name: str - :ivar prefetch_count: The number of messages that the message receiver can simultaneously - request. - :vartype prefetch_count: int """ _validation = { @@ -4021,9 +3339,7 @@ class EventHubStreamInputDataSource(StreamInputDataSource): "shared_access_policy_key": {"key": "properties.sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, "event_hub_name": {"key": "properties.eventHubName", "type": "str"}, - "partition_count": {"key": "properties.partitionCount", "type": "int"}, "consumer_group_name": {"key": "properties.consumerGroupName", "type": "str"}, - "prefetch_count": {"key": "properties.prefetchCount", "type": "int"}, } def __init__( @@ -4034,11 +3350,9 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, consumer_group_name: Optional[str] = None, - prefetch_count: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -4054,27 +3368,20 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int :keyword consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :paramtype consumer_group_name: str - :keyword prefetch_count: The number of messages that the message receiver can simultaneously - request. - :paramtype prefetch_count: int """ super().__init__(**kwargs) - self.type = "Microsoft.ServiceBus/EventHub" # type: str + self.type: str = "Microsoft.ServiceBus/EventHub" self.service_bus_namespace = service_bus_namespace self.shared_access_policy_name = shared_access_policy_name self.shared_access_policy_key = shared_access_policy_key self.authentication_mode = authentication_mode self.event_hub_name = event_hub_name - self.partition_count = partition_count self.consumer_group_name = consumer_group_name - self.prefetch_count = prefetch_count class EventHubStreamInputDataSourceProperties(EventHubDataSourceProperties): @@ -4094,16 +3401,11 @@ class EventHubStreamInputDataSourceProperties(EventHubDataSourceProperties): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. 
Range 1 - 256. - :vartype partition_count: int :ivar consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :vartype consumer_group_name: str - :ivar prefetch_count: The number of messages that the message receiver can simultaneously - request. - :vartype prefetch_count: int """ _attribute_map = { @@ -4112,9 +3414,7 @@ class EventHubStreamInputDataSourceProperties(EventHubDataSourceProperties): "shared_access_policy_key": {"key": "sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "authenticationMode", "type": "str"}, "event_hub_name": {"key": "eventHubName", "type": "str"}, - "partition_count": {"key": "partitionCount", "type": "int"}, "consumer_group_name": {"key": "consumerGroupName", "type": "str"}, - "prefetch_count": {"key": "prefetchCount", "type": "int"}, } def __init__( @@ -4125,11 +3425,9 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, consumer_group_name: Optional[str] = None, - prefetch_count: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -4145,16 +3443,11 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int :keyword consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :paramtype consumer_group_name: str - :keyword prefetch_count: The number of messages that the message receiver can simultaneously - request. - :paramtype prefetch_count: int """ super().__init__( service_bus_namespace=service_bus_namespace, @@ -4162,11 +3455,9 @@ def __init__( shared_access_policy_key=shared_access_policy_key, authentication_mode=authentication_mode, event_hub_name=event_hub_name, - partition_count=partition_count, **kwargs ) self.consumer_group_name = consumer_group_name - self.prefetch_count = prefetch_count class EventHubV2OutputDataSource(OutputDataSource): @@ -4191,8 +3482,6 @@ class EventHubV2OutputDataSource(OutputDataSource): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. - :vartype partition_count: int :ivar partition_key: The key/column that is used to determine to which partition to send event data. 
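The event hub sources above lose `partition_count` and `prefetch_count`, leaving namespace, hub name, consumer group, and authentication mode. A sketch of a stream input using managed identity; with "Msi" the shared access policy fields can be omitted (all names are placeholders):

from azure.mgmt.streamanalytics.models import EventHubStreamInputDataSource

# A distinct consumer group per input lets several inputs read the same events.
eventhub_input = EventHubStreamInputDataSource(
    service_bus_namespace="example-namespace",
    event_hub_name="telemetry",
    consumer_group_name="asa-consumer",
    authentication_mode="Msi",
)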
:vartype partition_key: str @@ -4211,7 +3500,6 @@ class EventHubV2OutputDataSource(OutputDataSource): "shared_access_policy_key": {"key": "properties.sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, "event_hub_name": {"key": "properties.eventHubName", "type": "str"}, - "partition_count": {"key": "properties.partitionCount", "type": "int"}, "partition_key": {"key": "properties.partitionKey", "type": "str"}, "property_columns": {"key": "properties.propertyColumns", "type": "[str]"}, } @@ -4224,11 +3512,10 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, partition_key: Optional[str] = None, property_columns: Optional[List[str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -4244,8 +3531,6 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int :keyword partition_key: The key/column that is used to determine to which partition to send event data. :paramtype partition_key: str @@ -4253,13 +3538,12 @@ def __init__( :paramtype property_columns: list[str] """ super().__init__(**kwargs) - self.type = "Microsoft.EventHub/EventHub" # type: str + self.type: str = "Microsoft.EventHub/EventHub" self.service_bus_namespace = service_bus_namespace self.shared_access_policy_name = shared_access_policy_name self.shared_access_policy_key = shared_access_policy_key self.authentication_mode = authentication_mode self.event_hub_name = event_hub_name - self.partition_count = partition_count self.partition_key = partition_key self.property_columns = property_columns @@ -4286,16 +3570,11 @@ class EventHubV2StreamInputDataSource(StreamInputDataSource): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :ivar event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :vartype event_hub_name: str - :ivar partition_count: The partition count of the event hub data source. Range 1 - 256. - :vartype partition_count: int :ivar consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :vartype consumer_group_name: str - :ivar prefetch_count: The number of messages that the message receiver can simultaneously - request. 
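For the V2 output shape above, a comparable sketch (placeholder names, not from the diff) showing the remaining keywords after `partition_count` was removed; `"Msi"` is one of the documented `authentication_mode` values:

```python
from azure.mgmt.streamanalytics.models import EventHubV2OutputDataSource

eventhub_output = EventHubV2OutputDataSource(
    service_bus_namespace="my-namespace",
    event_hub_name="my-output-hub",
    authentication_mode="Msi",           # managed identity instead of a policy key
    partition_key="deviceId",            # key/column used to pick the target partition
    property_columns=["deviceId", "eventType"],
)
```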
- :vartype prefetch_count: int """ _validation = { @@ -4309,9 +3588,7 @@ class EventHubV2StreamInputDataSource(StreamInputDataSource): "shared_access_policy_key": {"key": "properties.sharedAccessPolicyKey", "type": "str"}, "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, "event_hub_name": {"key": "properties.eventHubName", "type": "str"}, - "partition_count": {"key": "properties.partitionCount", "type": "int"}, "consumer_group_name": {"key": "properties.consumerGroupName", "type": "str"}, - "prefetch_count": {"key": "properties.prefetchCount", "type": "int"}, } def __init__( @@ -4322,11 +3599,9 @@ def __init__( shared_access_policy_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", event_hub_name: Optional[str] = None, - partition_count: Optional[int] = None, consumer_group_name: Optional[str] = None, - prefetch_count: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -4342,75 +3617,20 @@ def __init__( :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode :keyword event_hub_name: The name of the Event Hub. Required on PUT (CreateOrReplace) requests. :paramtype event_hub_name: str - :keyword partition_count: The partition count of the event hub data source. Range 1 - 256. - :paramtype partition_count: int :keyword consumer_group_name: The name of an Event Hub Consumer Group that should be used to read events from the Event Hub. Specifying distinct consumer group names for multiple inputs allows each of those inputs to receive the same events from the Event Hub. If not specified, the input uses the Event Hub’s default consumer group. :paramtype consumer_group_name: str - :keyword prefetch_count: The number of messages that the message receiver can simultaneously - request. - :paramtype prefetch_count: int """ super().__init__(**kwargs) - self.type = "Microsoft.EventHub/EventHub" # type: str + self.type: str = "Microsoft.EventHub/EventHub" self.service_bus_namespace = service_bus_namespace self.shared_access_policy_name = shared_access_policy_name self.shared_access_policy_key = shared_access_policy_key self.authentication_mode = authentication_mode self.event_hub_name = event_hub_name - self.partition_count = partition_count self.consumer_group_name = consumer_group_name - self.prefetch_count = prefetch_count - - -class External(_serialization.Model): - """The storage account where the custom code artifacts are located. - - :ivar storage_account: The properties that are associated with an Azure Storage account. - :vartype storage_account: ~azure.mgmt.streamanalytics.models.StorageAccount - :ivar container: The UserCustomCode container. - :vartype container: str - :ivar path: The UserCustomCode path. - :vartype path: str - :ivar refresh_configuration: The refresh parameters for any/all updatable user defined - functions present in the job config. 
- :vartype refresh_configuration: ~azure.mgmt.streamanalytics.models.RefreshConfiguration - """ - - _attribute_map = { - "storage_account": {"key": "storageAccount", "type": "StorageAccount"}, - "container": {"key": "container", "type": "str"}, - "path": {"key": "path", "type": "str"}, - "refresh_configuration": {"key": "refreshConfiguration", "type": "RefreshConfiguration"}, - } - - def __init__( - self, - *, - storage_account: Optional["_models.StorageAccount"] = None, - container: Optional[str] = None, - path: Optional[str] = None, - refresh_configuration: Optional["_models.RefreshConfiguration"] = None, - **kwargs - ): - """ - :keyword storage_account: The properties that are associated with an Azure Storage account. - :paramtype storage_account: ~azure.mgmt.streamanalytics.models.StorageAccount - :keyword container: The UserCustomCode container. - :paramtype container: str - :keyword path: The UserCustomCode path. - :paramtype path: str - :keyword refresh_configuration: The refresh parameters for any/all updatable user defined - functions present in the job config. - :paramtype refresh_configuration: ~azure.mgmt.streamanalytics.models.RefreshConfiguration - """ - super().__init__(**kwargs) - self.storage_account = storage_account - self.container = container - self.path = path - self.refresh_configuration = refresh_configuration class FileReferenceInputDataSource(ReferenceInputDataSource): @@ -4434,13 +3654,13 @@ class FileReferenceInputDataSource(ReferenceInputDataSource): "path": {"key": "properties.path", "type": "str"}, } - def __init__(self, *, path: Optional[str] = None, **kwargs): + def __init__(self, *, path: Optional[str] = None, **kwargs: Any) -> None: """ :keyword path: The path of the file. :paramtype path: str """ super().__init__(**kwargs) - self.type = "File" # type: str + self.type: str = "File" self.path = path @@ -4468,7 +3688,7 @@ class SubResource(_serialization.Model): "type": {"key": "type", "type": "str"}, } - def __init__(self, *, name: Optional[str] = None, **kwargs): + def __init__(self, *, name: Optional[str] = None, **kwargs: Any) -> None: """ :keyword name: Resource name. :paramtype name: str @@ -4480,7 +3700,8 @@ def __init__(self, *, name: Optional[str] = None, **kwargs): class Function(SubResource): - """A function object, containing all information associated with the named function. All functions are contained under a streaming job. + """A function object, containing all information associated with the named function. All functions + are contained under a streaming job. Variables are only populated by the server, and will be ignored when sending a request. @@ -4507,8 +3728,8 @@ class Function(SubResource): } def __init__( - self, *, name: Optional[str] = None, properties: Optional["_models.FunctionProperties"] = None, **kwargs - ): + self, *, name: Optional[str] = None, properties: Optional["_models.FunctionProperties"] = None, **kwargs: Any + ) -> None: """ :keyword name: Resource name. :paramtype name: str @@ -4536,7 +3757,9 @@ class FunctionInput(_serialization.Model): "is_configuration_parameter": {"key": "isConfigurationParameter", "type": "bool"}, } - def __init__(self, *, data_type: Optional[str] = None, is_configuration_parameter: Optional[bool] = None, **kwargs): + def __init__( + self, *, data_type: Optional[str] = None, is_configuration_parameter: Optional[bool] = None, **kwargs: Any + ) -> None: """ :keyword data_type: The (Azure Stream Analytics supported) data type of the function input parameter. 
A list of valid Azure Stream Analytics data types are described at @@ -4572,7 +3795,7 @@ class FunctionListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None @@ -4592,7 +3815,7 @@ class FunctionOutput(_serialization.Model): "data_type": {"key": "dataType", "type": "str"}, } - def __init__(self, *, data_type: Optional[str] = None, **kwargs): + def __init__(self, *, data_type: Optional[str] = None, **kwargs: Any) -> None: """ :keyword data_type: The (Azure Stream Analytics supported) data type of the function output. A list of valid Azure Stream Analytics data types are described at @@ -4624,13 +3847,13 @@ class GatewayMessageBusOutputDataSource(OutputDataSource): "topic": {"key": "properties.topic", "type": "str"}, } - def __init__(self, *, topic: Optional[str] = None, **kwargs): + def __init__(self, *, topic: Optional[str] = None, **kwargs: Any) -> None: """ :keyword topic: The name of the Service Bus topic. :paramtype topic: str """ super().__init__(**kwargs) - self.type = "GatewayMessageBus" # type: str + self.type: str = "GatewayMessageBus" self.topic = topic @@ -4645,7 +3868,7 @@ class GatewayMessageBusSourceProperties(_serialization.Model): "topic": {"key": "topic", "type": "str"}, } - def __init__(self, *, topic: Optional[str] = None, **kwargs): + def __init__(self, *, topic: Optional[str] = None, **kwargs: Any) -> None: """ :keyword topic: The name of the Service Bus topic. :paramtype topic: str @@ -4665,7 +3888,7 @@ class GatewayMessageBusOutputDataSourceProperties(GatewayMessageBusSourcePropert "topic": {"key": "topic", "type": "str"}, } - def __init__(self, *, topic: Optional[str] = None, **kwargs): + def __init__(self, *, topic: Optional[str] = None, **kwargs: Any) -> None: """ :keyword topic: The name of the Service Bus topic. :paramtype topic: str @@ -4694,13 +3917,13 @@ class GatewayMessageBusStreamInputDataSource(StreamInputDataSource): "topic": {"key": "properties.topic", "type": "str"}, } - def __init__(self, *, topic: Optional[str] = None, **kwargs): + def __init__(self, *, topic: Optional[str] = None, **kwargs: Any) -> None: """ :keyword topic: The name of the Service Bus topic. :paramtype topic: str """ super().__init__(**kwargs) - self.type = "GatewayMessageBus" # type: str + self.type: str = "GatewayMessageBus" self.topic = topic @@ -4715,7 +3938,7 @@ class GatewayMessageBusStreamInputDataSourceProperties(GatewayMessageBusSourcePr "topic": {"key": "topic", "type": "str"}, } - def __init__(self, *, topic: Optional[str] = None, **kwargs): + def __init__(self, *, topic: Optional[str] = None, **kwargs: Any) -> None: """ :keyword topic: The name of the Service Bus topic. :paramtype topic: str @@ -4723,135 +3946,44 @@ def __init__(self, *, topic: Optional[str] = None, **kwargs): super().__init__(topic=topic, **kwargs) -class GetStreamingJobSkuResult(_serialization.Model): - """Describes an available SKU information. +class Identity(_serialization.Model): + """Describes how identity is verified. Variables are only populated by the server, and will be ignored when sending a request. - :ivar resource_type: The type of resource the SKU applies to. - "Microsoft.StreamAnalytics/streamingjobs" - :vartype resource_type: str or ~azure.mgmt.streamanalytics.models.ResourceType - :ivar sku: The properties that are associated with a SKU. 
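The Gateway Message Bus input and output data sources above take only a topic name. A short sketch (topic names are placeholders, not from the diff):

```python
from azure.mgmt.streamanalytics.models import (
    GatewayMessageBusOutputDataSource,
    GatewayMessageBusStreamInputDataSource,
)

# Placeholder topic names for an edge job.
gmb_input = GatewayMessageBusStreamInputDataSource(topic="edge-input")
gmb_output = GatewayMessageBusOutputDataSource(topic="edge-output")
```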
- :vartype sku: ~azure.mgmt.streamanalytics.models.GetStreamingJobSkuResultSku - :ivar capacity: Describes scaling information of a SKU. - :vartype capacity: ~azure.mgmt.streamanalytics.models.SkuCapacity + :ivar tenant_id: The identity tenantId. + :vartype tenant_id: str + :ivar principal_id: The identity principal ID. + :vartype principal_id: str + :ivar type: The identity type. + :vartype type: str """ _validation = { - "resource_type": {"readonly": True}, - "sku": {"readonly": True}, - "capacity": {"readonly": True}, - } - - _attribute_map = { - "resource_type": {"key": "resourceType", "type": "str"}, - "sku": {"key": "sku", "type": "GetStreamingJobSkuResultSku"}, - "capacity": {"key": "capacity", "type": "SkuCapacity"}, - } - - def __init__(self, **kwargs): - """ """ - super().__init__(**kwargs) - self.resource_type = None - self.sku = None - self.capacity = None - - -class GetStreamingJobSkuResults(_serialization.Model): - """Result of the request to get streaming job SKUs. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of available SKUs that the streaming job can use. - :vartype value: list[~azure.mgmt.streamanalytics.models.GetStreamingJobSkuResult] - :ivar next_link: The link (url) to the next page of results. - :vartype next_link: str - """ - - _validation = { - "next_link": {"readonly": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[GetStreamingJobSkuResult]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__(self, *, value: Optional[List["_models.GetStreamingJobSkuResult"]] = None, **kwargs): - """ - :keyword value: The list of available SKUs that the streaming job can use. - :paramtype value: list[~azure.mgmt.streamanalytics.models.GetStreamingJobSkuResult] - """ - super().__init__(**kwargs) - self.value = value - self.next_link = None - - -class GetStreamingJobSkuResultSku(_serialization.Model): - """The properties that are associated with a SKU. - - :ivar name: The name of the SKU. "Standard" - :vartype name: str or ~azure.mgmt.streamanalytics.models.SkuName - """ - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - } - - def __init__(self, *, name: Optional[Union[str, "_models.SkuName"]] = None, **kwargs): - """ - :keyword name: The name of the SKU. "Standard" - :paramtype name: str or ~azure.mgmt.streamanalytics.models.SkuName - """ - super().__init__(**kwargs) - self.name = name - - -class Identity(_serialization.Model): - """Describes how identity is verified. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar tenant_id: The tenantId of the identity. - :vartype tenant_id: str - :ivar principal_id: The principalId of the identity. - :vartype principal_id: str - :ivar type: The type of identity, can be SystemAssigned or UserAssigned. - :vartype type: str - :ivar user_assigned_identities: The user assigned identities associated with the streaming job - resource. 
- :vartype user_assigned_identities: JSON - """ - - _validation = { - "tenant_id": {"readonly": True}, - "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, + "principal_id": {"readonly": True}, } _attribute_map = { "tenant_id": {"key": "tenantId", "type": "str"}, "principal_id": {"key": "principalId", "type": "str"}, "type": {"key": "type", "type": "str"}, - "user_assigned_identities": {"key": "userAssignedIdentities", "type": "object"}, } - def __init__(self, *, type: Optional[str] = None, user_assigned_identities: Optional[JSON] = None, **kwargs): + def __init__(self, *, type: Optional[str] = None, **kwargs: Any) -> None: """ - :keyword type: The type of identity, can be SystemAssigned or UserAssigned. + :keyword type: The identity type. :paramtype type: str - :keyword user_assigned_identities: The user assigned identities associated with the streaming - job resource. - :paramtype user_assigned_identities: JSON """ super().__init__(**kwargs) self.tenant_id = None self.principal_id = None self.type = type - self.user_assigned_identities = user_assigned_identities class Input(SubResource): - """An input object, containing all information associated with the named input. All inputs are contained under a streaming job. + """An input object, containing all information associated with the named input. All inputs are + contained under a streaming job. Variables are only populated by the server, and will be ignored when sending a request. @@ -4878,7 +4010,9 @@ class Input(SubResource): "properties": {"key": "properties", "type": "InputProperties"}, } - def __init__(self, *, name: Optional[str] = None, properties: Optional["_models.InputProperties"] = None, **kwargs): + def __init__( + self, *, name: Optional[str] = None, properties: Optional["_models.InputProperties"] = None, **kwargs: Any + ) -> None: """ :keyword name: Resource name. :paramtype name: str @@ -4911,7 +4045,7 @@ class InputListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None @@ -4946,8 +4080,6 @@ class InputProperties(_serialization.Model): :ivar partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. :vartype partition_key: str - :ivar watermark_settings: Settings which determine whether to read watermark events. - :vartype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties """ _validation = { @@ -4963,7 +4095,6 @@ class InputProperties(_serialization.Model): "etag": {"key": "etag", "type": "str"}, "compression": {"key": "compression", "type": "Compression"}, "partition_key": {"key": "partitionKey", "type": "str"}, - "watermark_settings": {"key": "watermarkSettings", "type": "InputWatermarkProperties"}, } _subtype_map = {"type": {"Reference": "ReferenceInputProperties", "Stream": "StreamInputProperties"}} @@ -4974,9 +4105,8 @@ def __init__( serialization: Optional["_models.Serialization"] = None, compression: Optional["_models.Compression"] = None, partition_key: Optional[str] = None, - watermark_settings: Optional["_models.InputWatermarkProperties"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. 
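With `user_assigned_identities` removed from `Identity`, only the identity type is settable on this model; a minimal sketch ("SystemAssigned" per the removed docstring's enumeration):

```python
from azure.mgmt.streamanalytics.models import Identity

# tenant_id and principal_id are read-only and populated by the server.
identity = Identity(type="SystemAssigned")
```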
@@ -4986,38 +4116,14 @@ def __init__( :keyword partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. :paramtype partition_key: str - :keyword watermark_settings: Settings which determine whether to read watermark events. - :paramtype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties """ super().__init__(**kwargs) - self.type = None # type: Optional[str] + self.type: Optional[str] = None self.serialization = serialization self.diagnostics = None self.etag = None self.compression = compression self.partition_key = partition_key - self.watermark_settings = watermark_settings - - -class InputWatermarkProperties(_serialization.Model): - """Settings which determine whether to read watermark events. - - :ivar watermark_mode: The input watermark mode. Known values are: "None" and "ReadWatermark". - :vartype watermark_mode: str or ~azure.mgmt.streamanalytics.models.InputWatermarkMode - """ - - _attribute_map = { - "watermark_mode": {"key": "watermarkMode", "type": "str"}, - } - - def __init__(self, *, watermark_mode: Optional[Union[str, "_models.InputWatermarkMode"]] = None, **kwargs): - """ - :keyword watermark_mode: The input watermark mode. Known values are: "None" and - "ReadWatermark". - :paramtype watermark_mode: str or ~azure.mgmt.streamanalytics.models.InputWatermarkMode - """ - super().__init__(**kwargs) - self.watermark_mode = watermark_mode class IoTHubStreamInputDataSource(StreamInputDataSource): @@ -5067,8 +4173,8 @@ def __init__( shared_access_policy_key: Optional[str] = None, consumer_group_name: Optional[str] = None, endpoint: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword iot_hub_namespace: The name or the URI of the IoT Hub. Required on PUT (CreateOrReplace) requests. @@ -5088,7 +4194,7 @@ def __init__( :paramtype endpoint: str """ super().__init__(**kwargs) - self.type = "Microsoft.Devices/IotHubs" # type: str + self.type: str = "Microsoft.Devices/IotHubs" self.iot_hub_namespace = iot_hub_namespace self.shared_access_policy_name = shared_access_policy_name self.shared_access_policy_key = shared_access_policy_key @@ -5117,14 +4223,14 @@ class JavaScriptFunctionBinding(FunctionBinding): "script": {"key": "properties.script", "type": "str"}, } - def __init__(self, *, script: Optional[str] = None, **kwargs): + def __init__(self, *, script: Optional[str] = None, **kwargs: Any) -> None: """ :keyword script: The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }'. :paramtype script: str """ super().__init__(**kwargs) - self.type = "Microsoft.StreamAnalytics/JavascriptUdf" # type: str + self.type: str = "Microsoft.StreamAnalytics/JavascriptUdf" self.script = script @@ -5152,7 +4258,9 @@ class JavaScriptFunctionRetrieveDefaultDefinitionParameters(FunctionRetrieveDefa "udf_type": {"key": "bindingRetrievalProperties.udfType", "type": "str"}, } - def __init__(self, *, script: Optional[str] = None, udf_type: Optional[Literal["Scalar"]] = None, **kwargs): + def __init__( + self, *, script: Optional[str] = None, udf_type: Optional[Literal["Scalar"]] = None, **kwargs: Any + ) -> None: """ :keyword script: The JavaScript code containing a single function definition. For example: 'function (x, y) { return x + y; }'. 
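Two quick sketches for the models just shown, using placeholder IoT Hub coordinates ("iothubowner" and "messages/events" are illustrative values, not taken from this diff) and the single-function UDF body quoted in the docstring:

```python
from azure.mgmt.streamanalytics.models import (
    IoTHubStreamInputDataSource,
    JavaScriptFunctionBinding,
)

# Placeholder IoT Hub coordinates.
iothub_input = IoTHubStreamInputDataSource(
    iot_hub_namespace="my-iot-hub",
    shared_access_policy_name="iothubowner",
    shared_access_policy_key="<policy-key>",
    consumer_group_name="$Default",
    endpoint="messages/events",
)

# A JavaScript UDF binding containing a single function definition.
js_binding = JavaScriptFunctionBinding(script="function (x, y) { return x + y; }")
```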
@@ -5161,7 +4269,7 @@ def __init__(self, *, script: Optional[str] = None, udf_type: Optional[Literal[" :paramtype udf_type: str """ super().__init__(**kwargs) - self.binding_type = "Microsoft.StreamAnalytics/JavascriptUdf" # type: str + self.binding_type: str = "Microsoft.StreamAnalytics/JavascriptUdf" self.script = script self.udf_type = udf_type @@ -5175,25 +4283,14 @@ class StorageAccount(_serialization.Model): :ivar account_key: The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests. :vartype account_key: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ _attribute_map = { "account_name": {"key": "accountName", "type": "str"}, "account_key": {"key": "accountKey", "type": "str"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, } - def __init__( - self, - *, - account_name: Optional[str] = None, - account_key: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + def __init__(self, *, account_name: Optional[str] = None, account_key: Optional[str] = None, **kwargs: Any) -> None: """ :keyword account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. @@ -5201,14 +4298,10 @@ def __init__( :keyword account_key: The account key for the Azure Storage account. Required on PUT (CreateOrReplace) requests. :paramtype account_key: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) self.account_name = account_name self.account_key = account_key - self.authentication_mode = authentication_mode class JobStorageAccount(StorageAccount): @@ -5237,8 +4330,8 @@ def __init__( account_name: Optional[str] = None, account_key: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword account_name: The name of the Azure Storage account. Required on PUT (CreateOrReplace) requests. @@ -5250,19 +4343,18 @@ def __init__( "ConnectionString". :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ - super().__init__( - account_name=account_name, account_key=account_key, authentication_mode=authentication_mode, **kwargs - ) + super().__init__(account_name=account_name, account_key=account_key, **kwargs) + self.authentication_mode = authentication_mode class JsonSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in JSON format. + """Describes how data from an input is serialized or how data is serialized when written to an + output in JSON format. All required parameters must be populated in order to send to Azure. :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", and - "Parquet". + (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", and "Parquet". 
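Because `authentication_mode` moved off the base `StorageAccount` model and now lives only on `JobStorageAccount`, construction differs between the two; a minimal sketch with placeholder account values:

```python
from azure.mgmt.streamanalytics.models import JobStorageAccount, StorageAccount

# StorageAccount no longer accepts authentication_mode in this version.
account = StorageAccount(account_name="mystorageacct", account_key="<account-key>")

# The keyword is still available on the JobStorageAccount subclass.
job_account = JobStorageAccount(
    account_name="mystorageacct",
    account_key="<account-key>",
    authentication_mode="ConnectionString",
)
```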
:vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType :ivar encoding: Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. @@ -5292,8 +4384,8 @@ def __init__( *, encoding: Optional[Union[str, "_models.Encoding"]] = None, format: Optional[Union[str, "_models.JsonOutputSerializationFormat"]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword encoding: Specifies the encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. Required on PUT (CreateOrReplace) requests. @@ -5308,39 +4400,11 @@ def __init__( :paramtype format: str or ~azure.mgmt.streamanalytics.models.JsonOutputSerializationFormat """ super().__init__(**kwargs) - self.type = "Json" # type: str + self.type: str = "Json" self.encoding = encoding self.format = format -class LastOutputEventTimestamp(_serialization.Model): - """An output event timestamp. - - :ivar last_output_event_time: The last output event time. - :vartype last_output_event_time: str - :ivar last_update_time: The time that the last update happened. - :vartype last_update_time: str - """ - - _attribute_map = { - "last_output_event_time": {"key": "lastOutputEventTime", "type": "str"}, - "last_update_time": {"key": "lastUpdateTime", "type": "str"}, - } - - def __init__( - self, *, last_output_event_time: Optional[str] = None, last_update_time: Optional[str] = None, **kwargs - ): - """ - :keyword last_output_event_time: The last output event time. - :paramtype last_output_event_time: str - :keyword last_update_time: The time that the last update happened. - :paramtype last_update_time: str - """ - super().__init__(**kwargs) - self.last_output_event_time = last_output_event_time - self.last_update_time = last_update_time - - class Operation(_serialization.Model): """A Stream Analytics REST API operation. @@ -5366,7 +4430,7 @@ class Operation(_serialization.Model): "display": {"key": "display", "type": "OperationDisplay"}, } - def __init__(self, *, is_data_action: Optional[bool] = None, **kwargs): + def __init__(self, *, is_data_action: Optional[bool] = None, **kwargs: Any) -> None: """ :keyword is_data_action: Indicates whether the operation is a data action. :paramtype is_data_action: bool @@ -5407,7 +4471,7 @@ class OperationDisplay(_serialization.Model): "description": {"key": "description", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.provider = None @@ -5417,7 +4481,8 @@ def __init__(self, **kwargs): class OperationListResult(_serialization.Model): - """Result of the request to list Stream Analytics operations. It contains a list of operations and a URL link to get the next set of results. + """Result of the request to list Stream Analytics operations. It contains a list of operations and + a URL link to get the next set of results. Variables are only populated by the server, and will be ignored when sending a request. @@ -5438,15 +4503,16 @@ class OperationListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None self.next_link = None -class Output(SubResource): # pylint: disable=too-many-instance-attributes - """An output object, containing all information associated with the named output. 
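A one-line sketch of the JSON serialization above; "UTF8" and "LineSeparated" are the customary string values of the `Encoding` and `JsonOutputSerializationFormat` enums (assumed here, since the hunk does not list them), and plain strings are accepted because the parameters are typed `str or` the enum:

```python
from azure.mgmt.streamanalytics.models import JsonSerialization

json_serialization = JsonSerialization(encoding="UTF8", format="LineSeparated")
```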
All outputs are contained under a streaming job. +class Output(SubResource): + """An output object, containing all information associated with the named output. All outputs are + contained under a streaming job. Variables are only populated by the server, and will be ignored when sending a request. @@ -5462,7 +4528,7 @@ class Output(SubResource): # pylint: disable=too-many-instance-attributes :ivar time_window: The time frame for filtering Stream Analytics job outputs. :vartype time_window: str :ivar size_window: The size window to constrain a Stream Analytics output to. - :vartype size_window: float + :vartype size_window: int :ivar serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. :vartype serialization: ~azure.mgmt.streamanalytics.models.Serialization @@ -5473,12 +4539,6 @@ class Output(SubResource): # pylint: disable=too-many-instance-attributes detect whether the resource has changed between requests. You can also use it in the If-Match or If-None-Match headers for write operations for optimistic concurrency. :vartype etag: str - :ivar last_output_event_timestamps: A list of the last output event times for each output - partition. The index of the array corresponds to the partition number. - :vartype last_output_event_timestamps: - list[~azure.mgmt.streamanalytics.models.LastOutputEventTimestamp] - :ivar watermark_settings: Settings which determine whether to send watermarks to downstream. - :vartype watermark_settings: ~azure.mgmt.streamanalytics.models.OutputWatermarkProperties """ _validation = { @@ -5486,7 +4546,6 @@ class Output(SubResource): # pylint: disable=too-many-instance-attributes "type": {"readonly": True}, "diagnostics": {"readonly": True}, "etag": {"readonly": True}, - "last_output_event_timestamps": {"readonly": True}, } _attribute_map = { @@ -5495,15 +4554,10 @@ class Output(SubResource): # pylint: disable=too-many-instance-attributes "type": {"key": "type", "type": "str"}, "datasource": {"key": "properties.datasource", "type": "OutputDataSource"}, "time_window": {"key": "properties.timeWindow", "type": "str"}, - "size_window": {"key": "properties.sizeWindow", "type": "float"}, + "size_window": {"key": "properties.sizeWindow", "type": "int"}, "serialization": {"key": "properties.serialization", "type": "Serialization"}, "diagnostics": {"key": "properties.diagnostics", "type": "Diagnostics"}, "etag": {"key": "properties.etag", "type": "str"}, - "last_output_event_timestamps": { - "key": "properties.lastOutputEventTimestamps", - "type": "[LastOutputEventTimestamp]", - }, - "watermark_settings": {"key": "properties.watermarkSettings", "type": "OutputWatermarkProperties"}, } def __init__( @@ -5512,11 +4566,10 @@ def __init__( name: Optional[str] = None, datasource: Optional["_models.OutputDataSource"] = None, time_window: Optional[str] = None, - size_window: Optional[float] = None, + size_window: Optional[int] = None, serialization: Optional["_models.Serialization"] = None, - watermark_settings: Optional["_models.OutputWatermarkProperties"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword name: Resource name. :paramtype name: str @@ -5526,12 +4579,10 @@ def __init__( :keyword time_window: The time frame for filtering Stream Analytics job outputs. :paramtype time_window: str :keyword size_window: The size window to constrain a Stream Analytics output to. 
- :paramtype size_window: float + :paramtype size_window: int :keyword serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. :paramtype serialization: ~azure.mgmt.streamanalytics.models.Serialization - :keyword watermark_settings: Settings which determine whether to send watermarks to downstream. - :paramtype watermark_settings: ~azure.mgmt.streamanalytics.models.OutputWatermarkProperties """ super().__init__(name=name, **kwargs) self.datasource = datasource @@ -5540,8 +4591,6 @@ def __init__( self.serialization = serialization self.diagnostics = None self.etag = None - self.last_output_event_timestamps = None - self.watermark_settings = watermark_settings class OutputListResult(_serialization.Model): @@ -5565,59 +4614,21 @@ class OutputListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None self.next_link = None -class OutputWatermarkProperties(_serialization.Model): - """Settings which determine whether to send watermarks to downstream. - - :ivar watermark_mode: The output watermark mode. Known values are: "None", - "SendCurrentPartitionWatermark", and "SendLowestWatermarkAcrossPartitions". - :vartype watermark_mode: str or ~azure.mgmt.streamanalytics.models.OutputWatermarkMode - :ivar max_watermark_difference_across_partitions: Describes the maximal delta between the - fastest and slowest partitions, so the out of order window that catches all necessary events in - downstream jobs is well defined. - :vartype max_watermark_difference_across_partitions: str - """ - - _attribute_map = { - "watermark_mode": {"key": "watermarkMode", "type": "str"}, - "max_watermark_difference_across_partitions": {"key": "maxWatermarkDifferenceAcrossPartitions", "type": "str"}, - } - - def __init__( - self, - *, - watermark_mode: Optional[Union[str, "_models.OutputWatermarkMode"]] = None, - max_watermark_difference_across_partitions: Optional[str] = None, - **kwargs - ): - """ - :keyword watermark_mode: The output watermark mode. Known values are: "None", - "SendCurrentPartitionWatermark", and "SendLowestWatermarkAcrossPartitions". - :paramtype watermark_mode: str or ~azure.mgmt.streamanalytics.models.OutputWatermarkMode - :keyword max_watermark_difference_across_partitions: Describes the maximal delta between the - fastest and slowest partitions, so the out of order window that catches all necessary events in - downstream jobs is well defined. - :paramtype max_watermark_difference_across_partitions: str - """ - super().__init__(**kwargs) - self.watermark_mode = watermark_mode - self.max_watermark_difference_across_partitions = max_watermark_difference_across_partitions - - class ParquetSerialization(Serialization): - """Describes how data from an input is serialized or how data is serialized when written to an output in Parquet format. + """Describes how data from an input is serialized or how data is serialized when written to an + output in Parquet format. All required parameters must be populated in order to send to Azure. :ivar type: Indicates the type of serialization that the input or output uses. Required on PUT - (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", "CustomClr", and - "Parquet". + (CreateOrReplace) requests. Required. Known values are: "Csv", "Avro", "Json", and "Parquet". 
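A hedged sketch of the reshaped `Output` model: `size_window` is now typed `int` rather than `float`, and the watermark/last-output-timestamp members are gone. Names and values below are placeholders, and the serialization enum strings are assumed as above:

```python
from azure.mgmt.streamanalytics.models import (
    GatewayMessageBusOutputDataSource,
    JsonSerialization,
    Output,
)

output = Output(
    name="output1",
    datasource=GatewayMessageBusOutputDataSource(topic="edge-output"),
    serialization=JsonSerialization(encoding="UTF8", format="LineSeparated"),
    size_window=1024,  # now declared as int (previously float)
)
```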
:vartype type: str or ~azure.mgmt.streamanalytics.models.EventSerializationType :ivar properties: The properties that are associated with the Parquet serialization type. Required on PUT (CreateOrReplace) requests. @@ -5633,278 +4644,129 @@ class ParquetSerialization(Serialization): "properties": {"key": "properties", "type": "object"}, } - def __init__(self, *, properties: Optional[JSON] = None, **kwargs): + def __init__(self, *, properties: Optional[JSON] = None, **kwargs: Any) -> None: """ :keyword properties: The properties that are associated with the Parquet serialization type. Required on PUT (CreateOrReplace) requests. :paramtype properties: JSON """ super().__init__(**kwargs) - self.type = "Parquet" # type: str + self.type: str = "Parquet" self.properties = properties -class PostgreSQLDataSourceProperties(_serialization.Model): - """The properties that are associated with an Azure SQL database data source. +class PowerBIOutputDataSource(OutputDataSource): + """Describes a Power BI output data source. - :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :vartype server: str - :ivar database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :vartype database: str - :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) + All required parameters must be populated in order to send to Azure. + + :ivar type: Indicates the type of data source output will be written to. Required on PUT + (CreateOrReplace) requests. Required. + :vartype type: str + :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can + then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. + :vartype refresh_token: str + :ivar token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :vartype token_user_principal_name: str + :ivar token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :vartype token_user_display_name: str + :ivar dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :vartype dataset: str + :ivar table: The name of the Power BI table under the specified dataset. Required on PUT + (CreateOrReplace) requests. :vartype table: str - :ivar user: The user name that will be used to connect to the Azure SQL database. Required on - PUT (CreateOrReplace) requests. - :vartype user: str - :ivar password: The password that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :vartype password: str - :ivar max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query - partition) are available. Optional on PUT requests. - :vartype max_writer_count: float + :ivar group_id: The ID of the Power BI group. + :vartype group_id: str + :ivar group_name: The name of the Power BI group. 
Use this property to help remember which + specific Power BI group id was used. + :vartype group_name: str :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and "ConnectionString". :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ + _validation = { + "type": {"required": True}, + } + _attribute_map = { - "server": {"key": "server", "type": "str"}, - "database": {"key": "database", "type": "str"}, - "table": {"key": "table", "type": "str"}, - "user": {"key": "user", "type": "str"}, - "password": {"key": "password", "type": "str"}, - "max_writer_count": {"key": "maxWriterCount", "type": "float"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "refresh_token": {"key": "properties.refreshToken", "type": "str"}, + "token_user_principal_name": {"key": "properties.tokenUserPrincipalName", "type": "str"}, + "token_user_display_name": {"key": "properties.tokenUserDisplayName", "type": "str"}, + "dataset": {"key": "properties.dataset", "type": "str"}, + "table": {"key": "properties.table", "type": "str"}, + "group_id": {"key": "properties.groupId", "type": "str"}, + "group_name": {"key": "properties.groupName", "type": "str"}, + "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, } def __init__( self, *, - server: Optional[str] = None, - database: Optional[str] = None, + refresh_token: Optional[str] = None, + token_user_principal_name: Optional[str] = None, + token_user_display_name: Optional[str] = None, + dataset: Optional[str] = None, table: Optional[str] = None, - user: Optional[str] = None, - password: Optional[str] = None, - max_writer_count: Optional[float] = None, + group_id: Optional[str] = None, + group_name: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype server: str - :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) + :keyword refresh_token: A refresh token that can be used to obtain a valid access token that + can then be used to authenticate with the data source. A valid refresh token is currently only + obtainable via the Azure Portal. It is recommended to put a dummy string value here when + creating the data source and then going to the Azure Portal to authenticate the data source + which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) requests. - :paramtype database: str - :keyword table: The name of the table in the Azure SQL database. Required on PUT + :paramtype refresh_token: str + :keyword token_user_principal_name: The user principal name (UPN) of the user that was used to + obtain the refresh token. Use this property to help remember which user was used to obtain the + refresh token. + :paramtype token_user_principal_name: str + :keyword token_user_display_name: The user display name of the user that was used to obtain the + refresh token. Use this property to help remember which user was used to obtain the refresh + token. + :paramtype token_user_display_name: str + :keyword dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. + :paramtype dataset: str + :keyword table: The name of the Power BI table under the specified dataset. 
Required on PUT (CreateOrReplace) requests. :paramtype table: str - :keyword user: The user name that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :paramtype user: str - :keyword password: The password that will be used to connect to the Azure SQL database. - Required on PUT (CreateOrReplace) requests. - :paramtype password: str - :keyword max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. - :paramtype max_writer_count: float + :keyword group_id: The ID of the Power BI group. + :paramtype group_id: str + :keyword group_name: The name of the Power BI group. Use this property to help remember which + specific Power BI group id was used. + :paramtype group_name: str :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and "ConnectionString". :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ super().__init__(**kwargs) - self.server = server - self.database = database + self.type: str = "PowerBI" + self.refresh_token = refresh_token + self.token_user_principal_name = token_user_principal_name + self.token_user_display_name = token_user_display_name + self.dataset = dataset self.table = table - self.user = user - self.password = password - self.max_writer_count = max_writer_count + self.group_id = group_id + self.group_name = group_name self.authentication_mode = authentication_mode -class PostgreSQLOutputDataSource(OutputDataSource): - """Describes a PostgreSQL output data source. - - All required parameters must be populated in order to send to Azure. +class PowerBIOutputDataSourceProperties(OAuthBasedDataSourceProperties): + """The properties that are associated with a Power BI output. - :ivar type: Indicates the type of data source output will be written to. Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str - :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :vartype server: str - :ivar database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :vartype database: str - :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :vartype table: str - :ivar user: The user name that will be used to connect to the Azure SQL database. Required on - PUT (CreateOrReplace) requests. - :vartype user: str - :ivar password: The password that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :vartype password: str - :ivar max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query - partition) are available. Optional on PUT requests. - :vartype max_writer_count: float - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". 
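A sketch of the Power BI output just defined, following the docstring's own guidance of seeding `refresh_token` with a dummy value and replacing it via the Azure Portal; all other values are placeholders:

```python
from azure.mgmt.streamanalytics.models import PowerBIOutputDataSource

powerbi_output = PowerBIOutputDataSource(
    refresh_token="dummy",  # replaced with a real token via the Azure Portal
    dataset="sales-dataset",
    table="sales-table",
    group_id="<power-bi-group-guid>",
    group_name="SalesWorkspace",
    authentication_mode="ConnectionString",
)
```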
- :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "server": {"key": "properties.server", "type": "str"}, - "database": {"key": "properties.database", "type": "str"}, - "table": {"key": "properties.table", "type": "str"}, - "user": {"key": "properties.user", "type": "str"}, - "password": {"key": "properties.password", "type": "str"}, - "max_writer_count": {"key": "properties.maxWriterCount", "type": "float"}, - "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, - } - - def __init__( - self, - *, - server: Optional[str] = None, - database: Optional[str] = None, - table: Optional[str] = None, - user: Optional[str] = None, - password: Optional[str] = None, - max_writer_count: Optional[float] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): - """ - :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype server: str - :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :paramtype database: str - :keyword table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype table: str - :keyword user: The user name that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :paramtype user: str - :keyword password: The password that will be used to connect to the Azure SQL database. - Required on PUT (CreateOrReplace) requests. - :paramtype password: str - :keyword max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. - :paramtype max_writer_count: float - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - super().__init__(**kwargs) - self.type = "Microsoft.DBForPostgreSQL/servers/databases" # type: str - self.server = server - self.database = database - self.table = table - self.user = user - self.password = password - self.max_writer_count = max_writer_count - self.authentication_mode = authentication_mode - - -class PostgreSQLOutputDataSourceProperties(PostgreSQLDataSourceProperties): - """The properties that are associated with a PostgreSQL output. - - :ivar server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :vartype server: str - :ivar database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) requests. - :vartype database: str - :ivar table: The name of the table in the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :vartype table: str - :ivar user: The user name that will be used to connect to the Azure SQL database. Required on - PUT (CreateOrReplace) requests. - :vartype user: str - :ivar password: The password that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :vartype password: str - :ivar max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on query - partition) are available. Optional on PUT requests. 
- :vartype max_writer_count: float - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - - _attribute_map = { - "server": {"key": "server", "type": "str"}, - "database": {"key": "database", "type": "str"}, - "table": {"key": "table", "type": "str"}, - "user": {"key": "user", "type": "str"}, - "password": {"key": "password", "type": "str"}, - "max_writer_count": {"key": "maxWriterCount", "type": "float"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, - } - - def __init__( - self, - *, - server: Optional[str] = None, - database: Optional[str] = None, - table: Optional[str] = None, - user: Optional[str] = None, - password: Optional[str] = None, - max_writer_count: Optional[float] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): - """ - :keyword server: The name of the SQL server containing the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype server: str - :keyword database: The name of the Azure SQL database. Required on PUT (CreateOrReplace) - requests. - :paramtype database: str - :keyword table: The name of the table in the Azure SQL database. Required on PUT - (CreateOrReplace) requests. - :paramtype table: str - :keyword user: The user name that will be used to connect to the Azure SQL database. Required - on PUT (CreateOrReplace) requests. - :paramtype user: str - :keyword password: The password that will be used to connect to the Azure SQL database. - Required on PUT (CreateOrReplace) requests. - :paramtype password: str - :keyword max_writer_count: Max Writer count, currently only 1(single writer) and 0(based on - query partition) are available. Optional on PUT requests. - :paramtype max_writer_count: float - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - super().__init__( - server=server, - database=database, - table=table, - user=user, - password=password, - max_writer_count=max_writer_count, - authentication_mode=authentication_mode, - **kwargs - ) - - -class PowerBIOutputDataSource(OutputDataSource): - """Describes a Power BI output data source. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of data source output will be written to. Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only obtainable via the Azure Portal. 
It is recommended to put a dummy string value here when @@ -5935,20 +4797,15 @@ class PowerBIOutputDataSource(OutputDataSource): :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ - _validation = { - "type": {"required": True}, - } - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "refresh_token": {"key": "properties.refreshToken", "type": "str"}, - "token_user_principal_name": {"key": "properties.tokenUserPrincipalName", "type": "str"}, - "token_user_display_name": {"key": "properties.tokenUserDisplayName", "type": "str"}, - "dataset": {"key": "properties.dataset", "type": "str"}, - "table": {"key": "properties.table", "type": "str"}, - "group_id": {"key": "properties.groupId", "type": "str"}, - "group_name": {"key": "properties.groupName", "type": "str"}, - "authentication_mode": {"key": "properties.authenticationMode", "type": "str"}, + "refresh_token": {"key": "refreshToken", "type": "str"}, + "token_user_principal_name": {"key": "tokenUserPrincipalName", "type": "str"}, + "token_user_display_name": {"key": "tokenUserDisplayName", "type": "str"}, + "dataset": {"key": "dataset", "type": "str"}, + "table": {"key": "table", "type": "str"}, + "group_id": {"key": "groupId", "type": "str"}, + "group_name": {"key": "groupName", "type": "str"}, + "authentication_mode": {"key": "authenticationMode", "type": "str"}, } def __init__( @@ -5962,8 +4819,8 @@ def __init__( group_id: Optional[str] = None, group_name: Optional[str] = None, authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword refresh_token: A refresh token that can be used to obtain a valid access token that can then be used to authenticate with the data source. A valid refresh token is currently only @@ -5994,11 +4851,12 @@ def __init__( "ConnectionString". :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode """ - super().__init__(**kwargs) - self.type = "PowerBI" # type: str - self.refresh_token = refresh_token - self.token_user_principal_name = token_user_principal_name - self.token_user_display_name = token_user_display_name + super().__init__( + refresh_token=refresh_token, + token_user_principal_name=token_user_principal_name, + token_user_display_name=token_user_display_name, + **kwargs + ) self.dataset = dataset self.table = table self.group_id = group_id @@ -6006,134 +4864,35 @@ def __init__( self.authentication_mode = authentication_mode -class PowerBIOutputDataSourceProperties(OAuthBasedDataSourceProperties): - """The properties that are associated with a Power BI output. +class ProxyResource(Resource): + """The resource model definition for a ARM proxy resource. It will have everything other than + required location and tags. - :ivar refresh_token: A refresh token that can be used to obtain a valid access token that can - then be used to authenticate with the data source. A valid refresh token is currently only - obtainable via the Azure Portal. It is recommended to put a dummy string value here when - creating the data source and then going to the Azure Portal to authenticate the data source - which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) - requests. - :vartype refresh_token: str - :ivar token_user_principal_name: The user principal name (UPN) of the user that was used to - obtain the refresh token. Use this property to help remember which user was used to obtain the - refresh token. 
- :vartype token_user_principal_name: str - :ivar token_user_display_name: The user display name of the user that was used to obtain the - refresh token. Use this property to help remember which user was used to obtain the refresh - token. - :vartype token_user_display_name: str - :ivar dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. - :vartype dataset: str - :ivar table: The name of the Power BI table under the specified dataset. Required on PUT - (CreateOrReplace) requests. - :vartype table: str - :ivar group_id: The ID of the Power BI group. - :vartype group_id: str - :ivar group_name: The name of the Power BI group. Use this property to help remember which - specific Power BI group id was used. - :vartype group_name: str - :ivar authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :vartype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str """ + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + } + _attribute_map = { - "refresh_token": {"key": "refreshToken", "type": "str"}, - "token_user_principal_name": {"key": "tokenUserPrincipalName", "type": "str"}, - "token_user_display_name": {"key": "tokenUserDisplayName", "type": "str"}, - "dataset": {"key": "dataset", "type": "str"}, - "table": {"key": "table", "type": "str"}, - "group_id": {"key": "groupId", "type": "str"}, - "group_name": {"key": "groupName", "type": "str"}, - "authentication_mode": {"key": "authenticationMode", "type": "str"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, } - def __init__( - self, - *, - refresh_token: Optional[str] = None, - token_user_principal_name: Optional[str] = None, - token_user_display_name: Optional[str] = None, - dataset: Optional[str] = None, - table: Optional[str] = None, - group_id: Optional[str] = None, - group_name: Optional[str] = None, - authentication_mode: Union[str, "_models.AuthenticationMode"] = "ConnectionString", - **kwargs - ): - """ - :keyword refresh_token: A refresh token that can be used to obtain a valid access token that - can then be used to authenticate with the data source. A valid refresh token is currently only - obtainable via the Azure Portal. It is recommended to put a dummy string value here when - creating the data source and then going to the Azure Portal to authenticate the data source - which will update this property with a valid refresh token. Required on PUT (CreateOrReplace) - requests. - :paramtype refresh_token: str - :keyword token_user_principal_name: The user principal name (UPN) of the user that was used to - obtain the refresh token. Use this property to help remember which user was used to obtain the - refresh token. - :paramtype token_user_principal_name: str - :keyword token_user_display_name: The user display name of the user that was used to obtain the - refresh token. 
Use this property to help remember which user was used to obtain the refresh - token. - :paramtype token_user_display_name: str - :keyword dataset: The name of the Power BI dataset. Required on PUT (CreateOrReplace) requests. - :paramtype dataset: str - :keyword table: The name of the Power BI table under the specified dataset. Required on PUT - (CreateOrReplace) requests. - :paramtype table: str - :keyword group_id: The ID of the Power BI group. - :paramtype group_id: str - :keyword group_name: The name of the Power BI group. Use this property to help remember which - specific Power BI group id was used. - :paramtype group_name: str - :keyword authentication_mode: Authentication Mode. Known values are: "Msi", "UserToken", and - "ConnectionString". - :paramtype authentication_mode: str or ~azure.mgmt.streamanalytics.models.AuthenticationMode - """ - super().__init__( - refresh_token=refresh_token, - token_user_principal_name=token_user_principal_name, - token_user_display_name=token_user_display_name, - **kwargs - ) - self.dataset = dataset - self.table = table - self.group_id = group_id - self.group_name = group_name - self.authentication_mode = authentication_mode - - -class ProxyResource(Resource): - """The resource model definition for a ARM proxy resource. It will have everything other than required location and tags. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - } - - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) @@ -6151,12 +4910,16 @@ class PrivateEndpoint(ProxyResource): :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. :vartype type: str - :ivar properties: The properties associated with a private endpoint. - :vartype properties: ~azure.mgmt.streamanalytics.models.PrivateEndpointProperties :ivar etag: Unique opaque string (generally a GUID) that represents the metadata state of the resource (private endpoint) and changes whenever the resource is updated. Required on PUT (CreateOrUpdate) requests. :vartype etag: str + :ivar created_date: The date when this private endpoint was created. + :vartype created_date: str + :ivar manual_private_link_service_connections: A list of connections to the remote resource. + Immutable after it is set. 
+ :vartype manual_private_link_service_connections: + list[~azure.mgmt.streamanalytics.models.PrivateLinkServiceConnection] """ _validation = { @@ -6164,24 +4927,37 @@ class PrivateEndpoint(ProxyResource): "name": {"readonly": True}, "type": {"readonly": True}, "etag": {"readonly": True}, + "created_date": {"readonly": True}, } _attribute_map = { "id": {"key": "id", "type": "str"}, "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, - "properties": {"key": "properties", "type": "PrivateEndpointProperties"}, "etag": {"key": "etag", "type": "str"}, + "created_date": {"key": "properties.createdDate", "type": "str"}, + "manual_private_link_service_connections": { + "key": "properties.manualPrivateLinkServiceConnections", + "type": "[PrivateLinkServiceConnection]", + }, } - def __init__(self, *, properties: Optional["_models.PrivateEndpointProperties"] = None, **kwargs): + def __init__( + self, + *, + manual_private_link_service_connections: Optional[List["_models.PrivateLinkServiceConnection"]] = None, + **kwargs: Any + ) -> None: """ - :keyword properties: The properties associated with a private endpoint. - :paramtype properties: ~azure.mgmt.streamanalytics.models.PrivateEndpointProperties + :keyword manual_private_link_service_connections: A list of connections to the remote resource. + Immutable after it is set. + :paramtype manual_private_link_service_connections: + list[~azure.mgmt.streamanalytics.models.PrivateLinkServiceConnection] """ super().__init__(**kwargs) - self.properties = properties self.etag = None + self.created_date = None + self.manual_private_link_service_connections = manual_private_link_service_connections class PrivateEndpointListResult(_serialization.Model): @@ -6205,57 +4981,16 @@ class PrivateEndpointListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None self.next_link = None -class PrivateEndpointProperties(_serialization.Model): - """The properties associated with a private endpoint. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar created_date: The date when this private endpoint was created. - :vartype created_date: str - :ivar manual_private_link_service_connections: A list of connections to the remote resource. - Immutable after it is set. - :vartype manual_private_link_service_connections: - list[~azure.mgmt.streamanalytics.models.PrivateLinkServiceConnection] - """ - - _validation = { - "created_date": {"readonly": True}, - } - - _attribute_map = { - "created_date": {"key": "createdDate", "type": "str"}, - "manual_private_link_service_connections": { - "key": "manualPrivateLinkServiceConnections", - "type": "[PrivateLinkServiceConnection]", - }, - } - - def __init__( - self, - *, - manual_private_link_service_connections: Optional[List["_models.PrivateLinkServiceConnection"]] = None, - **kwargs - ): - """ - :keyword manual_private_link_service_connections: A list of connections to the remote resource. - Immutable after it is set. 
- :paramtype manual_private_link_service_connections: - list[~azure.mgmt.streamanalytics.models.PrivateLinkServiceConnection] - """ - super().__init__(**kwargs) - self.created_date = None - self.manual_private_link_service_connections = manual_private_link_service_connections - - class PrivateLinkConnectionState(_serialization.Model): - """A collection of read-only information about the state of the connection to the private remote resource. + """A collection of read-only information about the state of the connection to the private remote + resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -6281,7 +5016,7 @@ class PrivateLinkConnectionState(_serialization.Model): "actions_required": {"key": "actionsRequired", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.status = None @@ -6304,393 +5039,50 @@ class PrivateLinkServiceConnection(_serialization.Model): connection request. Restricted to 140 chars. :vartype request_message: str :ivar private_link_service_connection_state: A collection of read-only information about the - state of the connection to the private remote resource. - :vartype private_link_service_connection_state: - ~azure.mgmt.streamanalytics.models.PrivateLinkConnectionState - """ - - _validation = { - "request_message": {"readonly": True}, - } - - _attribute_map = { - "private_link_service_id": {"key": "properties.privateLinkServiceId", "type": "str"}, - "group_ids": {"key": "properties.groupIds", "type": "[str]"}, - "request_message": {"key": "properties.requestMessage", "type": "str"}, - "private_link_service_connection_state": { - "key": "properties.privateLinkServiceConnectionState", - "type": "PrivateLinkConnectionState", - }, - } - - def __init__( - self, - *, - private_link_service_id: Optional[str] = None, - group_ids: Optional[List[str]] = None, - private_link_service_connection_state: Optional["_models.PrivateLinkConnectionState"] = None, - **kwargs - ): - """ - :keyword private_link_service_id: The resource id of the private link service. Required on PUT - (CreateOrUpdate) requests. - :paramtype private_link_service_id: str - :keyword group_ids: The ID(s) of the group(s) obtained from the remote resource that this - private endpoint should connect to. Required on PUT (CreateOrUpdate) requests. - :paramtype group_ids: list[str] - :keyword private_link_service_connection_state: A collection of read-only information about the - state of the connection to the private remote resource. - :paramtype private_link_service_connection_state: - ~azure.mgmt.streamanalytics.models.PrivateLinkConnectionState - """ - super().__init__(**kwargs) - self.private_link_service_id = private_link_service_id - self.group_ids = group_ids - self.request_message = None - self.private_link_service_connection_state = private_link_service_connection_state - - -class QueryCompilationError(_serialization.Model): - """An error produced by the compiler. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar message: The content of the error message. - :vartype message: str - :ivar start_line: Describes the error location in the original query. Not set if isGlobal is - true. - :vartype start_line: int - :ivar start_column: Describes the error location in the original query. Not set if isGlobal is - true. - :vartype start_column: int - :ivar end_line: Describes the error location in the original query. 
Not set if isGlobal is - true. - :vartype end_line: int - :ivar end_column: Describes the error location in the original query. Not set if isGlobal is - true. - :vartype end_column: int - :ivar is_global: Whether the error is not for a specific part but for the entire query. - :vartype is_global: bool - """ - - _validation = { - "message": {"readonly": True}, - "start_line": {"readonly": True}, - "start_column": {"readonly": True}, - "end_line": {"readonly": True}, - "end_column": {"readonly": True}, - "is_global": {"readonly": True}, - } - - _attribute_map = { - "message": {"key": "message", "type": "str"}, - "start_line": {"key": "startLine", "type": "int"}, - "start_column": {"key": "startColumn", "type": "int"}, - "end_line": {"key": "endLine", "type": "int"}, - "end_column": {"key": "endColumn", "type": "int"}, - "is_global": {"key": "isGlobal", "type": "bool"}, - } - - def __init__(self, **kwargs): - """ """ - super().__init__(**kwargs) - self.message = None - self.start_line = None - self.start_column = None - self.end_line = None - self.end_column = None - self.is_global = None - - -class QueryCompilationResult(_serialization.Model): - """The result of the query compilation request. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar errors: Error messages produced by the compiler. - :vartype errors: list[~azure.mgmt.streamanalytics.models.QueryCompilationError] - :ivar warnings: Warning messages produced by the compiler. - :vartype warnings: list[str] - :ivar inputs: All input names used by the query. - :vartype inputs: list[str] - :ivar outputs: All output names used by the query. - :vartype outputs: list[str] - :ivar functions: All function names used by the query. - :vartype functions: list[str] - """ - - _validation = { - "errors": {"readonly": True}, - "warnings": {"readonly": True}, - "inputs": {"readonly": True}, - "outputs": {"readonly": True}, - "functions": {"readonly": True}, - } - - _attribute_map = { - "errors": {"key": "errors", "type": "[QueryCompilationError]"}, - "warnings": {"key": "warnings", "type": "[str]"}, - "inputs": {"key": "inputs", "type": "[str]"}, - "outputs": {"key": "outputs", "type": "[str]"}, - "functions": {"key": "functions", "type": "[str]"}, - } - - def __init__(self, **kwargs): - """ """ - super().__init__(**kwargs) - self.errors = None - self.warnings = None - self.inputs = None - self.outputs = None - self.functions = None - - -class QueryFunction(_serialization.Model): - """A function for the query compilation. - - All required parameters must be populated in order to send to Azure. - - :ivar name: The name of the function. Required. - :vartype name: str - :ivar type: The type of the function. Required. - :vartype type: str - :ivar binding_type: The type of the function binding. Required. - :vartype binding_type: str - :ivar inputs: The inputs for the function. Required. - :vartype inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] - :ivar output: An output for the function. Required. 
- :vartype output: ~azure.mgmt.streamanalytics.models.FunctionOutput - """ - - _validation = { - "name": {"required": True}, - "type": {"required": True}, - "binding_type": {"required": True}, - "inputs": {"required": True}, - "output": {"required": True}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "binding_type": {"key": "bindingType", "type": "str"}, - "inputs": {"key": "inputs", "type": "[FunctionInput]"}, - "output": {"key": "output", "type": "FunctionOutput"}, - } - - def __init__( - self, - *, - name: str, - type: str, - binding_type: str, - inputs: List["_models.FunctionInput"], - output: "_models.FunctionOutput", - **kwargs - ): - """ - :keyword name: The name of the function. Required. - :paramtype name: str - :keyword type: The type of the function. Required. - :paramtype type: str - :keyword binding_type: The type of the function binding. Required. - :paramtype binding_type: str - :keyword inputs: The inputs for the function. Required. - :paramtype inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] - :keyword output: An output for the function. Required. - :paramtype output: ~azure.mgmt.streamanalytics.models.FunctionOutput - """ - super().__init__(**kwargs) - self.name = name - self.type = type - self.binding_type = binding_type - self.inputs = inputs - self.output = output - - -class QueryInput(_serialization.Model): - """An input for the query compilation. - - All required parameters must be populated in order to send to Azure. - - :ivar name: The name of the input. Required. - :vartype name: str - :ivar type: The type of the input, can be Stream or Reference. Required. - :vartype type: str - """ - - _validation = { - "name": {"required": True}, - "type": {"required": True}, - } - - _attribute_map = { - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - } - - def __init__(self, *, name: str, type: str, **kwargs): - """ - :keyword name: The name of the input. Required. - :paramtype name: str - :keyword type: The type of the input, can be Stream or Reference. Required. - :paramtype type: str - """ - super().__init__(**kwargs) - self.name = name - self.type = type - - -class QueryTestingResult(Error): - """The result of the query testing request. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar error: Error definition properties. - :vartype error: ~azure.mgmt.streamanalytics.models.ErrorError - :ivar status: The status of the query testing request. Known values are: "Started", "Success", - "CompilerError", "RuntimeError", "Timeout", and "UnknownError". - :vartype status: str or ~azure.mgmt.streamanalytics.models.QueryTestingResultStatus - :ivar output_uri: The SAS URL to the outputs payload. - :vartype output_uri: str - """ - - _validation = { - "status": {"readonly": True}, - "output_uri": {"readonly": True}, - } - - _attribute_map = { - "error": {"key": "error", "type": "ErrorError"}, - "status": {"key": "status", "type": "str"}, - "output_uri": {"key": "outputUri", "type": "str"}, - } - - def __init__(self, *, error: Optional["_models.ErrorError"] = None, **kwargs): - """ - :keyword error: Error definition properties. - :paramtype error: ~azure.mgmt.streamanalytics.models.ErrorError - """ - super().__init__(error=error, **kwargs) - self.status = None - self.output_uri = None - - -class RawOutputDatasource(OutputDataSource): - """Describes a raw output data source. 
This data source type is only applicable/usable when using the query testing API. You cannot create a job with this data source type or add an output of this data source type to an existing job. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of data source output will be written to. Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str - :ivar payload_uri: The SAS URL to a blob where the output should be written. If this property - is not set, output data will be written into a temporary storage, and a SAS URL to that - temporary storage will be included in the result. - :vartype payload_uri: str - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "payload_uri": {"key": "properties.payloadUri", "type": "str"}, - } - - def __init__(self, *, payload_uri: Optional[str] = None, **kwargs): - """ - :keyword payload_uri: The SAS URL to a blob where the output should be written. If this - property is not set, output data will be written into a temporary storage, and a SAS URL to - that temporary storage will be included in the result. - :paramtype payload_uri: str - """ - super().__init__(**kwargs) - self.type = "Raw" # type: str - self.payload_uri = payload_uri - - -class RawReferenceInputDataSource(ReferenceInputDataSource): - """Describes a raw input data source that contains reference data. This data source type is only applicable/usable when using the query testing API. You cannot create a job with this data source type or add an input of this data source type to an existing job. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of input data source containing reference data. Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str - :ivar payload: The JSON serialized content of the input data. Either payload or payloadUri must - be set, but not both. - :vartype payload: str - :ivar payload_uri: The SAS URL to a blob containing the JSON serialized content of the input - data. Either payload or payloadUri must be set, but not both. - :vartype payload_uri: str - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "type": {"key": "type", "type": "str"}, - "payload": {"key": "properties.payload", "type": "str"}, - "payload_uri": {"key": "properties.payloadUri", "type": "str"}, - } - - def __init__(self, *, payload: Optional[str] = None, payload_uri: Optional[str] = None, **kwargs): - """ - :keyword payload: The JSON serialized content of the input data. Either payload or payloadUri - must be set, but not both. - :paramtype payload: str - :keyword payload_uri: The SAS URL to a blob containing the JSON serialized content of the input - data. Either payload or payloadUri must be set, but not both. - :paramtype payload_uri: str - """ - super().__init__(**kwargs) - self.type = "Raw" # type: str - self.payload = payload - self.payload_uri = payload_uri - - -class RawStreamInputDataSource(StreamInputDataSource): - """Describes a raw input data source that contains stream data. This data source type is only applicable/usable when using the query testing API. You cannot create a job with this data source type or add an input of this data source type to an existing job. - - All required parameters must be populated in order to send to Azure. - - :ivar type: Indicates the type of input data source containing stream data. 
Required on PUT - (CreateOrReplace) requests. Required. - :vartype type: str - :ivar payload: The JSON serialized content of the input data. Either payload or payloadUri must - be set, but not both. - :vartype payload: str - :ivar payload_uri: The SAS URL to a blob containing the JSON serialized content of the input - data. Either payload or payloadUri must be set, but not both. - :vartype payload_uri: str + state of the connection to the private remote resource. + :vartype private_link_service_connection_state: + ~azure.mgmt.streamanalytics.models.PrivateLinkConnectionState """ _validation = { - "type": {"required": True}, + "request_message": {"readonly": True}, } _attribute_map = { - "type": {"key": "type", "type": "str"}, - "payload": {"key": "properties.payload", "type": "str"}, - "payload_uri": {"key": "properties.payloadUri", "type": "str"}, + "private_link_service_id": {"key": "properties.privateLinkServiceId", "type": "str"}, + "group_ids": {"key": "properties.groupIds", "type": "[str]"}, + "request_message": {"key": "properties.requestMessage", "type": "str"}, + "private_link_service_connection_state": { + "key": "properties.privateLinkServiceConnectionState", + "type": "PrivateLinkConnectionState", + }, } - def __init__(self, *, payload: Optional[str] = None, payload_uri: Optional[str] = None, **kwargs): + def __init__( + self, + *, + private_link_service_id: Optional[str] = None, + group_ids: Optional[List[str]] = None, + private_link_service_connection_state: Optional["_models.PrivateLinkConnectionState"] = None, + **kwargs: Any + ) -> None: """ - :keyword payload: The JSON serialized content of the input data. Either payload or payloadUri - must be set, but not both. - :paramtype payload: str - :keyword payload_uri: The SAS URL to a blob containing the JSON serialized content of the input - data. Either payload or payloadUri must be set, but not both. - :paramtype payload_uri: str + :keyword private_link_service_id: The resource id of the private link service. Required on PUT + (CreateOrUpdate) requests. + :paramtype private_link_service_id: str + :keyword group_ids: The ID(s) of the group(s) obtained from the remote resource that this + private endpoint should connect to. Required on PUT (CreateOrUpdate) requests. + :paramtype group_ids: list[str] + :keyword private_link_service_connection_state: A collection of read-only information about the + state of the connection to the private remote resource. + :paramtype private_link_service_connection_state: + ~azure.mgmt.streamanalytics.models.PrivateLinkConnectionState """ super().__init__(**kwargs) - self.type = "Raw" # type: str - self.payload = payload - self.payload_uri = payload_uri + self.private_link_service_id = private_link_service_id + self.group_ids = group_ids + self.request_message = None + self.private_link_service_connection_state = private_link_service_connection_state class ReferenceInputProperties(InputProperties): @@ -6718,8 +5110,6 @@ class ReferenceInputProperties(InputProperties): :ivar partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. :vartype partition_key: str - :ivar watermark_settings: Settings which determine whether to read watermark events. - :vartype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties :ivar datasource: Describes an input data source that contains reference data. Required on PUT (CreateOrReplace) requests. 
:vartype datasource: ~azure.mgmt.streamanalytics.models.ReferenceInputDataSource @@ -6738,7 +5128,6 @@ class ReferenceInputProperties(InputProperties): "etag": {"key": "etag", "type": "str"}, "compression": {"key": "compression", "type": "Compression"}, "partition_key": {"key": "partitionKey", "type": "str"}, - "watermark_settings": {"key": "watermarkSettings", "type": "InputWatermarkProperties"}, "datasource": {"key": "datasource", "type": "ReferenceInputDataSource"}, } @@ -6748,10 +5137,9 @@ def __init__( serialization: Optional["_models.Serialization"] = None, compression: Optional["_models.Compression"] = None, partition_key: Optional[str] = None, - watermark_settings: Optional["_models.InputWatermarkProperties"] = None, datasource: Optional["_models.ReferenceInputDataSource"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. @@ -6761,92 +5149,15 @@ def __init__( :keyword partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. :paramtype partition_key: str - :keyword watermark_settings: Settings which determine whether to read watermark events. - :paramtype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties :keyword datasource: Describes an input data source that contains reference data. Required on PUT (CreateOrReplace) requests. :paramtype datasource: ~azure.mgmt.streamanalytics.models.ReferenceInputDataSource """ - super().__init__( - serialization=serialization, - compression=compression, - partition_key=partition_key, - watermark_settings=watermark_settings, - **kwargs - ) - self.type = "Reference" # type: str + super().__init__(serialization=serialization, compression=compression, partition_key=partition_key, **kwargs) + self.type: str = "Reference" self.datasource = datasource -class RefreshConfiguration(_serialization.Model): - """The refresh parameters for any/all updatable user defined functions present in the job config. - - :ivar path_pattern: The blob path pattern. Not a regular expression. It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See - https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or - https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more - detailed explanation and example. - :vartype path_pattern: str - :ivar date_format: The date format. Wherever {date} appears in pathPattern, the value of this - property is used as the date format instead. - :vartype date_format: str - :ivar time_format: The time format. Wherever {time} appears in pathPattern, the value of this - property is used as the time format instead. - :vartype time_format: str - :ivar refresh_interval: The refresh interval. - :vartype refresh_interval: str - :ivar refresh_type: This property indicates which data refresh option to use, Blocking or - Nonblocking. Known values are: "Blocking" and "Nonblocking". 
- :vartype refresh_type: str or ~azure.mgmt.streamanalytics.models.UpdatableUdfRefreshType - """ - - _attribute_map = { - "path_pattern": {"key": "pathPattern", "type": "str"}, - "date_format": {"key": "dateFormat", "type": "str"}, - "time_format": {"key": "timeFormat", "type": "str"}, - "refresh_interval": {"key": "refreshInterval", "type": "str"}, - "refresh_type": {"key": "refreshType", "type": "str"}, - } - - def __init__( - self, - *, - path_pattern: Optional[str] = None, - date_format: Optional[str] = None, - time_format: Optional[str] = None, - refresh_interval: Optional[str] = None, - refresh_type: Optional[Union[str, "_models.UpdatableUdfRefreshType"]] = None, - **kwargs - ): - """ - :keyword path_pattern: The blob path pattern. Not a regular expression. It represents a pattern - against which blob names will be matched to determine whether or not they should be included as - input or output to the job. See - https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-input or - https://docs.microsoft.com/en-us/rest/api/streamanalytics/stream-analytics-output for a more - detailed explanation and example. - :paramtype path_pattern: str - :keyword date_format: The date format. Wherever {date} appears in pathPattern, the value of - this property is used as the date format instead. - :paramtype date_format: str - :keyword time_format: The time format. Wherever {time} appears in pathPattern, the value of - this property is used as the time format instead. - :paramtype time_format: str - :keyword refresh_interval: The refresh interval. - :paramtype refresh_interval: str - :keyword refresh_type: This property indicates which data refresh option to use, Blocking or - Nonblocking. Known values are: "Blocking" and "Nonblocking". - :paramtype refresh_type: str or ~azure.mgmt.streamanalytics.models.UpdatableUdfRefreshType - """ - super().__init__(**kwargs) - self.path_pattern = path_pattern - self.date_format = date_format - self.time_format = time_format - self.refresh_interval = refresh_interval - self.refresh_type = refresh_type - - class ResourceTestStatus(_serialization.Model): """Describes the status of the test operation along with error information, if applicable. @@ -6868,112 +5179,13 @@ class ResourceTestStatus(_serialization.Model): "error": {"key": "error", "type": "ErrorResponse"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.status = None self.error = None -class SampleInput(_serialization.Model): - """The stream analytics input to sample. - - :ivar input: The stream analytics input to sample. - :vartype input: ~azure.mgmt.streamanalytics.models.Input - :ivar compatibility_level: Defaults to the default ASA job compatibility level. Today it is - 1.2. - :vartype compatibility_level: str - :ivar events_uri: The SAS URI of the storage blob for service to write the sampled events to. - If this parameter is not provided, service will write events to he system account and share a - temporary SAS URI to it. - :vartype events_uri: str - :ivar data_locale: Defaults to en-US. 
- :vartype data_locale: str - """ - - _attribute_map = { - "input": {"key": "input", "type": "Input"}, - "compatibility_level": {"key": "compatibilityLevel", "type": "str"}, - "events_uri": {"key": "eventsUri", "type": "str"}, - "data_locale": {"key": "dataLocale", "type": "str"}, - } - - def __init__( - self, - *, - input: Optional["_models.Input"] = None, - compatibility_level: Optional[str] = None, - events_uri: Optional[str] = None, - data_locale: Optional[str] = None, - **kwargs - ): - """ - :keyword input: The stream analytics input to sample. - :paramtype input: ~azure.mgmt.streamanalytics.models.Input - :keyword compatibility_level: Defaults to the default ASA job compatibility level. Today it is - 1.2. - :paramtype compatibility_level: str - :keyword events_uri: The SAS URI of the storage blob for service to write the sampled events - to. If this parameter is not provided, service will write events to he system account and share - a temporary SAS URI to it. - :paramtype events_uri: str - :keyword data_locale: Defaults to en-US. - :paramtype data_locale: str - """ - super().__init__(**kwargs) - self.input = input - self.compatibility_level = compatibility_level - self.events_uri = events_uri - self.data_locale = data_locale - - -class SampleInputResult(Error): - """The result of the sample input request. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar error: Error definition properties. - :vartype error: ~azure.mgmt.streamanalytics.models.ErrorError - :ivar status: The status of the sample input request. Known values are: "ReadAllEventsInRange", - "NoEventsFoundInRange", and "ErrorConnectingToInput". - :vartype status: str or ~azure.mgmt.streamanalytics.models.SampleInputResultStatus - :ivar diagnostics: Diagnostics messages. E.g. message indicating some partitions from the input - have no data. - :vartype diagnostics: list[str] - :ivar events_download_url: A SAS URL to download the sampled input data. - :vartype events_download_url: str - :ivar last_arrival_time: The timestamp for the last event in the data. It is in DateTime - format. - :vartype last_arrival_time: str - """ - - _validation = { - "status": {"readonly": True}, - "diagnostics": {"readonly": True}, - "events_download_url": {"readonly": True}, - "last_arrival_time": {"readonly": True}, - } - - _attribute_map = { - "error": {"key": "error", "type": "ErrorError"}, - "status": {"key": "status", "type": "str"}, - "diagnostics": {"key": "diagnostics", "type": "[str]"}, - "events_download_url": {"key": "eventsDownloadUrl", "type": "str"}, - "last_arrival_time": {"key": "lastArrivalTime", "type": "str"}, - } - - def __init__(self, *, error: Optional["_models.ErrorError"] = None, **kwargs): - """ - :keyword error: Error definition properties. - :paramtype error: ~azure.mgmt.streamanalytics.models.ErrorError - """ - super().__init__(error=error, **kwargs) - self.status = None - self.diagnostics = None - self.events_download_url = None - self.last_arrival_time = None - - class ScalarFunctionProperties(FunctionProperties): """The properties that are associated with a scalar function. 
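Reviewer note on the hunks that follow: the regeneration systematically replaces the old `**kwargs` / `):` signatures and `# type:` comments with `**kwargs: Any`, `-> None`, and inline attribute annotations (for example, `self.type = "Scalar"  # type: str` becomes `self.type: str = "Scalar"`). A minimal sketch of that pattern, using illustrative stand-in class bodies rather than the full generated code:

```python
from typing import Any, Optional


class FunctionProperties:
    """Illustrative stand-in for the generated polymorphic base class."""

    def __init__(self, **kwargs: Any) -> None:
        self.type: Optional[str] = None  # discriminator, set by each subclass


class ScalarFunctionProperties(FunctionProperties):
    """Mirrors the regenerated subclass pattern."""

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        # Inline annotation replaces the old `self.type = "Scalar"  # type: str`.
        self.type: str = "Scalar"
```

Runtime behavior is unchanged; only static type checkers see the difference.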
@@ -7015,8 +5227,8 @@ def __init__( inputs: Optional[List["_models.FunctionInput"]] = None, output: Optional["_models.FunctionOutput"] = None, binding: Optional["_models.FunctionBinding"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword inputs: :paramtype inputs: list[~azure.mgmt.streamanalytics.models.FunctionInput] @@ -7027,7 +5239,7 @@ def __init__( :paramtype binding: ~azure.mgmt.streamanalytics.models.FunctionBinding """ super().__init__(inputs=inputs, output=output, binding=binding, **kwargs) - self.type = "Scalar" # type: str + self.type: str = "Scalar" class ScaleStreamingJobParameters(_serialization.Model): @@ -7042,7 +5254,7 @@ class ScaleStreamingJobParameters(_serialization.Model): "streaming_units": {"key": "streamingUnits", "type": "int"}, } - def __init__(self, *, streaming_units: Optional[int] = None, **kwargs): + def __init__(self, *, streaming_units: Optional[int] = None, **kwargs: Any) -> None: """ :keyword streaming_units: Specifies the number of streaming units that the streaming job will scale to. @@ -7110,8 +5322,8 @@ def __init__( queue_name: Optional[str] = None, property_columns: Optional[List[str]] = None, system_property_columns: Optional[JSON] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -7138,7 +5350,7 @@ def __init__( :paramtype system_property_columns: JSON """ super().__init__(**kwargs) - self.type = "Microsoft.ServiceBus/Queue" # type: str + self.type: str = "Microsoft.ServiceBus/Queue" self.service_bus_namespace = service_bus_namespace self.shared_access_policy_name = shared_access_policy_name self.shared_access_policy_key = shared_access_policy_key @@ -7196,8 +5408,8 @@ def __init__( queue_name: Optional[str] = None, property_columns: Optional[List[str]] = None, system_property_columns: Optional[JSON] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -7293,8 +5505,8 @@ def __init__( topic_name: Optional[str] = None, property_columns: Optional[List[str]] = None, system_property_columns: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -7321,7 +5533,7 @@ def __init__( :paramtype system_property_columns: dict[str, str] """ super().__init__(**kwargs) - self.type = "Microsoft.ServiceBus/Topic" # type: str + self.type: str = "Microsoft.ServiceBus/Topic" self.service_bus_namespace = service_bus_namespace self.shared_access_policy_name = shared_access_policy_name self.shared_access_policy_key = shared_access_policy_key @@ -7379,8 +5591,8 @@ def __init__( topic_name: Optional[str] = None, property_columns: Optional[List[str]] = None, system_property_columns: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_bus_namespace: The namespace that is associated with the desired Event Hub, Service Bus Queue, Service Bus Topic, etc. Required on PUT (CreateOrReplace) requests. @@ -7423,71 +5635,19 @@ class Sku(_serialization.Model): :ivar name: The name of the SKU. Required on PUT (CreateOrReplace) requests. 
"Standard" :vartype name: str or ~azure.mgmt.streamanalytics.models.SkuName - :ivar capacity: The capacity of the SKU. - :vartype capacity: int """ _attribute_map = { "name": {"key": "name", "type": "str"}, - "capacity": {"key": "capacity", "type": "int"}, } - def __init__( - self, *, name: Optional[Union[str, "_models.SkuName"]] = None, capacity: Optional[int] = None, **kwargs - ): + def __init__(self, *, name: Optional[Union[str, "_models.SkuName"]] = None, **kwargs: Any) -> None: """ :keyword name: The name of the SKU. Required on PUT (CreateOrReplace) requests. "Standard" :paramtype name: str or ~azure.mgmt.streamanalytics.models.SkuName - :keyword capacity: The capacity of the SKU. - :paramtype capacity: int """ super().__init__(**kwargs) self.name = name - self.capacity = capacity - - -class SkuCapacity(_serialization.Model): - """Describes scaling information of a SKU. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar minimum: Specifies the minimum streaming units that the streaming job can use. - :vartype minimum: int - :ivar maximum: Specifies the maximum streaming units that the streaming job can use. - :vartype maximum: int - :ivar default: Specifies the default streaming units that the streaming job can use. - :vartype default: int - :ivar scale_type: The scale type applicable to the SKU. Known values are: "automatic", - "manual", and "none". - :vartype scale_type: str or ~azure.mgmt.streamanalytics.models.SkuCapacityScaleType - :ivar allowed_values: Specifies the valid streaming units a streaming job can scale to. - :vartype allowed_values: list[int] - """ - - _validation = { - "minimum": {"readonly": True}, - "maximum": {"readonly": True}, - "default": {"readonly": True}, - "scale_type": {"readonly": True}, - "allowed_values": {"readonly": True}, - } - - _attribute_map = { - "minimum": {"key": "minimum", "type": "int"}, - "maximum": {"key": "maximum", "type": "int"}, - "default": {"key": "default", "type": "int"}, - "scale_type": {"key": "scaleType", "type": "str"}, - "allowed_values": {"key": "allowedValues", "type": "[int]"}, - } - - def __init__(self, **kwargs): - """ """ - super().__init__(**kwargs) - self.minimum = None - self.maximum = None - self.default = None - self.scale_type = None - self.allowed_values = None class StartStreamingJobParameters(_serialization.Model): @@ -7516,8 +5676,8 @@ def __init__( *, output_start_mode: Optional[Union[str, "_models.OutputStartMode"]] = None, output_start_time: Optional[datetime.datetime] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword output_start_mode: Value may be JobStartTime, CustomTime, or LastOutputEventTime to indicate whether the starting point of the output event stream should start whenever the job is @@ -7553,14 +5713,11 @@ class StreamingJob(TrackedResource): # pylint: disable=too-many-instance-attrib :vartype tags: dict[str, str] :ivar location: The geo-location where the resource lives. :vartype location: str + :ivar identity: Describes the system-assigned managed identity assigned to this job that can be + used to authenticate with inputs and outputs. + :vartype identity: ~azure.mgmt.streamanalytics.models.Identity :ivar sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. :vartype sku: ~azure.mgmt.streamanalytics.models.Sku - :ivar identity: Describes the managed identity assigned to this job that can be used to - authenticate with inputs and outputs. 
- :vartype identity: ~azure.mgmt.streamanalytics.models.Identity - :ivar sku_properties_sku: Describes the SKU of the streaming job. Required on PUT - (CreateOrReplace) requests. - :vartype sku_properties_sku: ~azure.mgmt.streamanalytics.models.Sku :ivar job_id: A GUID uniquely identifying the streaming job. This GUID is generated upon creation of the streaming job. :vartype job_id: str @@ -7644,8 +5801,6 @@ class StreamingJob(TrackedResource): # pylint: disable=too-many-instance-attrib JobStorageAccount, this requires the user to also specify jobStorageAccount property. . Known values are: "SystemAccount" and "JobStorageAccount". :vartype content_storage_policy: str or ~azure.mgmt.streamanalytics.models.ContentStoragePolicy - :ivar externals: The storage account where the custom code artifacts are located. - :vartype externals: ~azure.mgmt.streamanalytics.models.External :ivar cluster: The cluster which streaming jobs will run on. :vartype cluster: ~azure.mgmt.streamanalytics.models.ClusterInfo """ @@ -7668,9 +5823,8 @@ class StreamingJob(TrackedResource): # pylint: disable=too-many-instance-attrib "type": {"key": "type", "type": "str"}, "tags": {"key": "tags", "type": "{str}"}, "location": {"key": "location", "type": "str"}, - "sku": {"key": "sku", "type": "Sku"}, "identity": {"key": "identity", "type": "Identity"}, - "sku_properties_sku": {"key": "properties.sku", "type": "Sku"}, + "sku": {"key": "properties.sku", "type": "Sku"}, "job_id": {"key": "properties.jobId", "type": "str"}, "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, "job_state": {"key": "properties.jobState", "type": "str"}, @@ -7698,7 +5852,6 @@ class StreamingJob(TrackedResource): # pylint: disable=too-many-instance-attrib "etag": {"key": "properties.etag", "type": "str"}, "job_storage_account": {"key": "properties.jobStorageAccount", "type": "JobStorageAccount"}, "content_storage_policy": {"key": "properties.contentStoragePolicy", "type": "str"}, - "externals": {"key": "properties.externals", "type": "External"}, "cluster": {"key": "properties.cluster", "type": "ClusterInfo"}, } @@ -7707,9 +5860,8 @@ def __init__( # pylint: disable=too-many-locals *, tags: Optional[Dict[str, str]] = None, location: Optional[str] = None, - sku: Optional["_models.Sku"] = None, identity: Optional["_models.Identity"] = None, - sku_properties_sku: Optional["_models.Sku"] = None, + sku: Optional["_models.Sku"] = None, job_type: Optional[Union[str, "_models.JobType"]] = None, output_start_mode: Optional[Union[str, "_models.OutputStartMode"]] = None, output_start_time: Optional[datetime.datetime] = None, @@ -7725,24 +5877,20 @@ def __init__( # pylint: disable=too-many-locals functions: Optional[List["_models.Function"]] = None, job_storage_account: Optional["_models.JobStorageAccount"] = None, content_storage_policy: Optional[Union[str, "_models.ContentStoragePolicy"]] = None, - externals: Optional["_models.External"] = None, cluster: Optional["_models.ClusterInfo"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword location: The geo-location where the resource lives. :paramtype location: str + :keyword identity: Describes the system-assigned managed identity assigned to this job that can + be used to authenticate with inputs and outputs. + :paramtype identity: ~azure.mgmt.streamanalytics.models.Identity :keyword sku: Describes the SKU of the streaming job. Required on PUT (CreateOrReplace) requests. 
:paramtype sku: ~azure.mgmt.streamanalytics.models.Sku - :keyword identity: Describes the managed identity assigned to this job that can be used to - authenticate with inputs and outputs. - :paramtype identity: ~azure.mgmt.streamanalytics.models.Identity - :keyword sku_properties_sku: Describes the SKU of the streaming job. Required on PUT - (CreateOrReplace) requests. - :paramtype sku_properties_sku: ~azure.mgmt.streamanalytics.models.Sku :keyword job_type: Describes the type of the job. Valid modes are ``Cloud`` and 'Edge'. Known values are: "Cloud" and "Edge". :paramtype job_type: str or ~azure.mgmt.streamanalytics.models.JobType @@ -7808,15 +5956,12 @@ def __init__( # pylint: disable=too-many-locals Known values are: "SystemAccount" and "JobStorageAccount". :paramtype content_storage_policy: str or ~azure.mgmt.streamanalytics.models.ContentStoragePolicy - :keyword externals: The storage account where the custom code artifacts are located. - :paramtype externals: ~azure.mgmt.streamanalytics.models.External :keyword cluster: The cluster which streaming jobs will run on. :paramtype cluster: ~azure.mgmt.streamanalytics.models.ClusterInfo """ super().__init__(tags=tags, location=location, **kwargs) - self.sku = sku self.identity = identity - self.sku_properties_sku = sku_properties_sku + self.sku = sku self.job_id = None self.provisioning_state = None self.job_state = None @@ -7838,7 +5983,6 @@ def __init__( # pylint: disable=too-many-locals self.etag = None self.job_storage_account = job_storage_account self.content_storage_policy = content_storage_policy - self.externals = externals self.cluster = cluster @@ -7863,7 +6007,7 @@ class StreamingJobListResult(_serialization.Model): "next_link": {"key": "nextLink", "type": "str"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None @@ -7895,8 +6039,6 @@ class StreamInputProperties(InputProperties): :ivar partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. :vartype partition_key: str - :ivar watermark_settings: Settings which determine whether to read watermark events. - :vartype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties :ivar datasource: Describes an input data source that contains stream data. Required on PUT (CreateOrReplace) requests. :vartype datasource: ~azure.mgmt.streamanalytics.models.StreamInputDataSource @@ -7915,7 +6057,6 @@ class StreamInputProperties(InputProperties): "etag": {"key": "etag", "type": "str"}, "compression": {"key": "compression", "type": "Compression"}, "partition_key": {"key": "partitionKey", "type": "str"}, - "watermark_settings": {"key": "watermarkSettings", "type": "InputWatermarkProperties"}, "datasource": {"key": "datasource", "type": "StreamInputDataSource"}, } @@ -7925,10 +6066,9 @@ def __init__( serialization: Optional["_models.Serialization"] = None, compression: Optional["_models.Compression"] = None, partition_key: Optional[str] = None, - watermark_settings: Optional["_models.InputWatermarkProperties"] = None, datasource: Optional["_models.StreamInputDataSource"] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword serialization: Describes how data from an input is serialized or how data is serialized when written to an output. Required on PUT (CreateOrReplace) requests. 
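To make the StreamingJob change above concrete: the model now declares its SKU once through `sku` (flattened to `properties.sku`), and the duplicate `sku_properties_sku` keyword and the `externals` keyword are gone. A minimal construction sketch against the regenerated models; the `Identity` keyword shape is an assumption here, since its hunk is not part of this section:

```python
from azure.mgmt.streamanalytics import models

# `sku` now serializes to properties.sku, replacing the split
# sku / sku_properties_sku pair from the previous generation.
job = models.StreamingJob(
    location="West US",
    sku=models.Sku(name="Standard"),
    identity=models.Identity(type="SystemAssigned"),  # Identity shape assumed
    tags={"key1": "value1"},
)
```

Callers that previously passed `sku_properties_sku` will need to move that value to `sku` when upgrading.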
@@ -7938,20 +6078,12 @@ def __init__( :keyword partition_key: partitionKey Describes a key in the input data which is used for partitioning the input data. :paramtype partition_key: str - :keyword watermark_settings: Settings which determine whether to read watermark events. - :paramtype watermark_settings: ~azure.mgmt.streamanalytics.models.InputWatermarkProperties :keyword datasource: Describes an input data source that contains stream data. Required on PUT (CreateOrReplace) requests. :paramtype datasource: ~azure.mgmt.streamanalytics.models.StreamInputDataSource """ - super().__init__( - serialization=serialization, - compression=compression, - partition_key=partition_key, - watermark_settings=watermark_settings, - **kwargs - ) - self.type = "Stream" # type: str + super().__init__(serialization=serialization, compression=compression, partition_key=partition_key, **kwargs) + self.type: str = "Stream" self.datasource = datasource @@ -7987,7 +6119,7 @@ class SubscriptionQuota(SubResource): "current_count": {"key": "properties.currentCount", "type": "int"}, } - def __init__(self, *, name: Optional[str] = None, **kwargs): + def __init__(self, *, name: Optional[str] = None, **kwargs: Any) -> None: """ :keyword name: Resource name. :paramtype name: str @@ -7998,7 +6130,8 @@ def __init__(self, *, name: Optional[str] = None, **kwargs): class SubscriptionQuotasListResult(_serialization.Model): - """Result of the GetQuotas operation. It contains a list of quotas for the subscription in a particular region. + """Result of the GetQuotas operation. It contains a list of quotas for the subscription in a + particular region. Variables are only populated by the server, and will be ignored when sending a request. @@ -8014,168 +6147,15 @@ class SubscriptionQuotasListResult(_serialization.Model): "value": {"key": "value", "type": "[SubscriptionQuota]"}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ """ super().__init__(**kwargs) self.value = None -class TestDatasourceResult(Error): - """The result of the test input or output request. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar error: Error definition properties. - :vartype error: ~azure.mgmt.streamanalytics.models.ErrorError - :ivar status: The status of the sample output request. Known values are: "TestSucceeded" and - "TestFailed". - :vartype status: str or ~azure.mgmt.streamanalytics.models.TestDatasourceResultStatus - """ - - _validation = { - "status": {"readonly": True}, - } - - _attribute_map = { - "error": {"key": "error", "type": "ErrorError"}, - "status": {"key": "status", "type": "str"}, - } - - def __init__(self, *, error: Optional["_models.ErrorError"] = None, **kwargs): - """ - :keyword error: Error definition properties. - :paramtype error: ~azure.mgmt.streamanalytics.models.ErrorError - """ - super().__init__(error=error, **kwargs) - self.status = None - - -class TestInput(_serialization.Model): - """A stream analytics input. - - All required parameters must be populated in order to send to Azure. - - :ivar input: The stream analytics input to test. Required. - :vartype input: ~azure.mgmt.streamanalytics.models.Input - """ - - _validation = { - "input": {"required": True}, - } - - _attribute_map = { - "input": {"key": "input", "type": "Input"}, - } - - def __init__(self, *, input: "_models.Input", **kwargs): - """ - :keyword input: The stream analytics input to test. Required. 
- :paramtype input: ~azure.mgmt.streamanalytics.models.Input - """ - super().__init__(**kwargs) - self.input = input - - -class TestOutput(_serialization.Model): - """A stream analytics output. - - All required parameters must be populated in order to send to Azure. - - :ivar output: The stream analytics output to test. Required. - :vartype output: ~azure.mgmt.streamanalytics.models.Output - """ - - _validation = { - "output": {"required": True}, - } - - _attribute_map = { - "output": {"key": "output", "type": "Output"}, - } - - def __init__(self, *, output: "_models.Output", **kwargs): - """ - :keyword output: The stream analytics output to test. Required. - :paramtype output: ~azure.mgmt.streamanalytics.models.Output - """ - super().__init__(**kwargs) - self.output = output - - -class TestQuery(_serialization.Model): - """The request object for query testing. - - All required parameters must be populated in order to send to Azure. - - :ivar diagnostics: Diagnostics information related to query testing. - :vartype diagnostics: ~azure.mgmt.streamanalytics.models.TestQueryDiagnostics - :ivar streaming_job: Stream analytics job object which defines the input, output, and - transformation for the query testing. Required. - :vartype streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob - """ - - _validation = { - "streaming_job": {"required": True}, - } - - _attribute_map = { - "diagnostics": {"key": "diagnostics", "type": "TestQueryDiagnostics"}, - "streaming_job": {"key": "streamingJob", "type": "StreamingJob"}, - } - - def __init__( - self, - *, - streaming_job: "_models.StreamingJob", - diagnostics: Optional["_models.TestQueryDiagnostics"] = None, - **kwargs - ): - """ - :keyword diagnostics: Diagnostics information related to query testing. - :paramtype diagnostics: ~azure.mgmt.streamanalytics.models.TestQueryDiagnostics - :keyword streaming_job: Stream analytics job object which defines the input, output, and - transformation for the query testing. Required. - :paramtype streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob - """ - super().__init__(**kwargs) - self.diagnostics = diagnostics - self.streaming_job = streaming_job - - -class TestQueryDiagnostics(_serialization.Model): - """Diagnostics information related to query testing. - - All required parameters must be populated in order to send to Azure. - - :ivar write_uri: The SAS URI to the container or directory. Required. - :vartype write_uri: str - :ivar path: The path to the subdirectory. - :vartype path: str - """ - - _validation = { - "write_uri": {"required": True}, - } - - _attribute_map = { - "write_uri": {"key": "writeUri", "type": "str"}, - "path": {"key": "path", "type": "str"}, - } - - def __init__(self, *, write_uri: str, path: Optional[str] = None, **kwargs): - """ - :keyword write_uri: The SAS URI to the container or directory. Required. - :paramtype write_uri: str - :keyword path: The path to the subdirectory. - :paramtype path: str - """ - super().__init__(**kwargs) - self.write_uri = write_uri - self.path = path - - class Transformation(SubResource): - """A transformation object, containing all information associated with the named transformation. All transformations are contained under a streaming job. + """A transformation object, containing all information associated with the named transformation. + All transformations are contained under a streaming job. Variables are only populated by the server, and will be ignored when sending a request. 
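With the TestInput/TestOutput/TestQuery request models removed above, the transformation surface is the straightforward one shown in the next hunk (`streaming_units` defaults to 3 and `query` holds the job's query text). A short usage sketch; the `create_or_replace` call is assumed from the operation groups listed later in this diff, not shown here:

```python
from azure.mgmt.streamanalytics import models

# Keyword surface taken from the Transformation hunk that follows.
transformation = models.Transformation(
    name="Transformation",
    streaming_units=6,  # defaults to 3 when omitted
    query="SELECT * INTO [YourOutputAlias] FROM [YourInputAlias]",
)

# Assumed write path (operation group appears in operations/__init__.py below):
# client.transformations.create_or_replace(
#     "my-resource-group", "my-job", "Transformation", transformation
# )
```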
@@ -8222,8 +6202,8 @@ def __init__(
         streaming_units: int = 3,
         valid_streaming_units: Optional[List[int]] = None,
         query: Optional[str] = None,
-        **kwargs
-    ):
+        **kwargs: Any
+    ) -> None:
         """
         :keyword name: Resource name.
         :paramtype name: str
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py
index 6658735053c6..ba5c0b3a50c9 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/models/_stream_analytics_management_client_enums.py
@@ -18,33 +18,26 @@ class AuthenticationMode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
     CONNECTION_STRING = "ConnectionString"
 
 
-class BlobWriteMode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
-    """Determines whether blob blocks are either committed automatically or appended."""
-
-    APPEND = "Append"
-    ONCE = "Once"
-
-
 class ClusterProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
     """The status of the cluster provisioning. The three terminal states are: Succeeded, Failed and
     Canceled.
     """
 
-    #: The cluster provisioning succeeded.
     SUCCEEDED = "Succeeded"
-    #: The cluster provisioning failed.
+    """The cluster provisioning succeeded."""
     FAILED = "Failed"
-    #: The cluster provisioning was canceled.
+    """The cluster provisioning failed."""
     CANCELED = "Canceled"
-    #: The cluster provisioning was inprogress.
+    """The cluster provisioning was canceled."""
     IN_PROGRESS = "InProgress"
+    """The cluster provisioning was in progress."""
 
 
 class ClusterSkuName(str, Enum, metaclass=CaseInsensitiveEnumMeta):
     """Specifies the SKU name of the cluster. Required on PUT (CreateOrUpdate) requests."""
 
-    #: The default SKU.
     DEFAULT = "Default"
+    """The default SKU."""
 
 
 class CompatibilityLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta):
@@ -81,13 +74,6 @@ class Encoding(str, Enum, metaclass=CaseInsensitiveEnumMeta):
     UTF8 = "UTF8"
 
 
-class EventGridEventSchemaType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
-    """Supported Event Grid schema types."""
-
-    EVENT_GRID_EVENT_SCHEMA = "EventGridEventSchema"
-    CLOUD_EVENT_SCHEMA = "CloudEventSchema"
-
-
 class EventSerializationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
     """Indicates the type of serialization that the input or output uses. Required on PUT
     (CreateOrReplace) requests.
@@ -96,7 +82,6 @@ class EventSerializationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
     CSV = "Csv"
     AVRO = "Avro"
     JSON = "Json"
-    CUSTOM_CLR = "CustomClr"
     PARQUET = "Parquet"
 
 
@@ -107,36 +92,29 @@ class EventsOutOfOrderPolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta):
     DROP = "Drop"
 
 
-class InputWatermarkMode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
-    """The input watermark mode."""
-
-    NONE = "None"
-    READ_WATERMARK = "ReadWatermark"
-
-
 class JobState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
     """The current execution state of the streaming job."""
 
-    #: The job is currently in the Created state.
     CREATED = "Created"
-    #: The job is currently in the Starting state.
+    """The job is currently in the Created state."""
     STARTING = "Starting"
-    #: The job is currently in the Running state.
+    """The job is currently in the Starting state."""
     RUNNING = "Running"
-    #: The job is currently in the Stopping state.
+ """The job is currently in the Running state.""" STOPPING = "Stopping" - #: The job is currently in the Stopped state. + """The job is currently in the Stopping state.""" STOPPED = "Stopped" - #: The job is currently in the Deleting state. + """The job is currently in the Stopped state.""" DELETING = "Deleting" - #: The job is currently in the Failed state. + """The job is currently in the Deleting state.""" FAILED = "Failed" - #: The job is currently in the Degraded state. + """The job is currently in the Failed state.""" DEGRADED = "Degraded" - #: The job is currently in the Restarting state. + """The job is currently in the Degraded state.""" RESTARTING = "Restarting" - #: The job is currently in the Scaling state. + """The job is currently in the Restarting state.""" SCALING = "Scaling" + """The job is currently in the Scaling state.""" class JobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -179,31 +157,6 @@ class OutputStartMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): LAST_OUTPUT_EVENT_TIME = "LastOutputEventTime" -class OutputWatermarkMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The output watermark mode.""" - - NONE = "None" - SEND_CURRENT_PARTITION_WATERMARK = "SendCurrentPartitionWatermark" - SEND_LOWEST_WATERMARK_ACROSS_PARTITIONS = "SendLowestWatermarkAcrossPartitions" - - -class QueryTestingResultStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The status of the query testing request.""" - - #: The query testing operation was initiated. - STARTED = "Started" - #: The query testing operation succeeded. - SUCCESS = "Success" - #: The query testing operation failed due to a compiler error. - COMPILER_ERROR = "CompilerError" - #: The query testing operation failed due to a runtime error. - RUNTIME_ERROR = "RuntimeError" - #: The query testing operation failed due to a timeout. - TIMEOUT = "Timeout" - #: The query testing operation failed due to an unknown error . - UNKNOWN_ERROR = "UnknownError" - - class RefreshType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Indicates the type of data refresh option.""" @@ -212,58 +165,7 @@ class RefreshType(str, Enum, metaclass=CaseInsensitiveEnumMeta): REFRESH_PERIODICALLY_WITH_DELTA = "RefreshPeriodicallyWithDelta" -class ResourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of resource the SKU applies to.""" - - MICROSOFT_STREAM_ANALYTICS_STREAMINGJOBS = "Microsoft.StreamAnalytics/streamingjobs" - - -class SampleInputResultStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The status of the sample input request.""" - - #: The sample input operation successfully read all the events in the range. - READ_ALL_EVENTS_IN_RANGE = "ReadAllEventsInRange" - #: The sample input operation found no events in the range. - NO_EVENTS_FOUND_IN_RANGE = "NoEventsFoundInRange" - #: The sample input operation failed to connect to the input. - ERROR_CONNECTING_TO_INPUT = "ErrorConnectingToInput" - - -class SkuCapacityScaleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The scale type applicable to the SKU.""" - - #: Supported scale type automatic. - AUTOMATIC = "automatic" - #: Supported scale type manual. - MANUAL = "manual" - #: Scaling not supported. - NONE = "none" - - class SkuName(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The name of the SKU. 
Required on PUT (CreateOrReplace) requests.""" STANDARD = "Standard" - - -class TestDatasourceResultStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The status of the test input or output request.""" - - #: The test datasource operation succeeded. - TEST_SUCCEEDED = "TestSucceeded" - #: The test datasource operation failed. - TEST_FAILED = "TestFailed" - - -class UpdatableUdfRefreshType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """This property indicates which data refresh option to use, Blocking or Nonblocking.""" - - BLOCKING = "Blocking" - NONBLOCKING = "Nonblocking" - - -class UpdateMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Refresh modes for Stream Analytics functions.""" - - STATIC = "Static" - REFRESHABLE = "Refreshable" diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py index 0edde7ab94af..59ddeae83d21 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/__init__.py @@ -6,30 +6,28 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._functions_operations import FunctionsOperations -from ._inputs_operations import InputsOperations -from ._outputs_operations import OutputsOperations from ._operations import Operations from ._streaming_jobs_operations import StreamingJobsOperations -from ._sku_operations import SkuOperations -from ._subscriptions_operations import SubscriptionsOperations +from ._inputs_operations import InputsOperations +from ._outputs_operations import OutputsOperations from ._transformations_operations import TransformationsOperations +from ._functions_operations import FunctionsOperations +from ._subscriptions_operations import SubscriptionsOperations from ._clusters_operations import ClustersOperations from ._private_endpoints_operations import PrivateEndpointsOperations from ._patch import __all__ as _patch_all -from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import * # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk __all__ = [ - "FunctionsOperations", - "InputsOperations", - "OutputsOperations", "Operations", "StreamingJobsOperations", - "SkuOperations", - "SubscriptionsOperations", + "InputsOperations", + "OutputsOperations", "TransformationsOperations", + "FunctionsOperations", + "SubscriptionsOperations", "ClustersOperations", "PrivateEndpointsOperations", ] diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py index 65842e4f2070..60dfc6c54285 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_clusters_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
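[Editor's note — illustration only, not part of the diff.] Throughout the operations files that follow, the pinned Literal["...-preview"] api-version annotations are replaced by plain str values defaulting to "2020-03-01", and the operation methods now read self._config.api_version instead of a hard-coded constant. A client-wide override therefore reaches every request; a sketch assuming azure-identity is installed, with a placeholder subscription id:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

    client = StreamAnalyticsManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",
        api_version="2020-03-01",  # now honored by operations and, per this diff, by paging next-links too
    )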
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -31,10 +32,6 @@ from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -54,10 +51,8 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -73,7 +68,7 @@ def build_create_or_update_request( "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -96,10 +91,8 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -115,7 +108,7 @@ def build_update_request( "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -134,9 +127,7 @@ def build_get_request(resource_group_name: str, cluster_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -152,7 +143,7 @@ def build_get_request(resource_group_name: str, cluster_name: str, subscription_ "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), } - _url = _format_url_section(_url, 
**path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -169,9 +160,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -187,7 +176,7 @@ def build_delete_request( "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -202,9 +191,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -213,7 +200,7 @@ def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> H "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -228,9 +215,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -245,7 +230,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -262,9 +247,7 @@ def build_list_streaming_jobs_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -280,7 +263,7 @@ def build_list_streaming_jobs_request( "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = 
_format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -330,16 +313,14 @@ def _create_or_update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Cluster] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(cluster, (IO, bytes)): + if isinstance(cluster, (IOBase, bytes)): _content = cluster else: _json = self._serialize.body(cluster, "Cluster") @@ -359,10 +340,11 @@ def _create_or_update_initial( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -379,11 +361,13 @@ def _create_or_update_initial( deserialized = self._deserialize("Cluster", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } @overload def begin_create_or_update( @@ -495,7 +479,7 @@ def begin_create_or_update( :param cluster_name: The name of the cluster. Required. :type cluster_name: str :param cluster: The definition of the cluster that will be used to create a new cluster or - replace the existing one. Is either a model type or a IO type. Required. + replace the existing one. Is either a Cluster type or a IO type. Required. :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. 
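[Editor's note — illustration only, not part of the diff.] As the surrounding docstring describes, begin_create_or_update accepts either a Cluster model or an IO stream and returns an LROPoller; a sketch with placeholder resource names, reusing the client built above:

    from azure.mgmt.streamanalytics.models import Cluster, ClusterSku

    poller = client.clusters.begin_create_or_update(
        resource_group_name="my-rg",
        cluster_name="my-cluster",
        cluster=Cluster(location="eastus", sku=ClusterSku(name="Default", capacity=36)),
        if_none_match="*",  # fail rather than overwrite an existing cluster
    )
    cluster = poller.result()  # blocks until the long-running operation reaches a terminal state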
Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -523,16 +507,14 @@ def begin_create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Cluster] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._create_or_update_initial( # type: ignore + raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, cluster=cluster, @@ -554,7 +536,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -566,9 +548,11 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } def _update_initial( self, @@ -589,16 +573,14 @@ def _update_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.Cluster]] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.Cluster]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(cluster, (IO, bytes)): + if isinstance(cluster, (IOBase, bytes)): _content = cluster else: _json = self._serialize.body(cluster, "Cluster") @@ -617,10 +599,11 @@ def _update_initial( params=_params, ) request = 
_convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -639,7 +622,9 @@ def _update_initial( return deserialized - _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } @overload def begin_update( @@ -743,7 +728,7 @@ def begin_update( :param cluster_name: The name of the cluster. Required. :type cluster_name: str :param cluster: The properties specified here will overwrite the corresponding properties in - the existing cluster (ie. Those properties will be updated). Is either a model type or a IO + the existing cluster (ie. Those properties will be updated). Is either a Cluster type or a IO type. Required. :type cluster: ~azure.mgmt.streamanalytics.models.Cluster or IO :param if_match: The ETag of the resource. Omit this value to always overwrite the current @@ -768,16 +753,14 @@ def begin_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Cluster] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._update_initial( # type: ignore + raw_result = self._update_initial( resource_group_name=resource_group_name, cluster_name=cluster_name, cluster=cluster, @@ -798,7 +781,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -810,9 +793,11 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } @distributed_trace def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _models.Cluster: @@ -839,10 +824,8 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Cluster] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Cluster] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -854,10 +837,11 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -874,7 +858,9 @@ def get(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> _mo return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } def _delete_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, cluster_name: str, **kwargs: Any @@ -890,10 +876,8 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -905,10 +889,11 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -921,7 +906,9 @@ def _delete_initial( # pylint: 
disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } @distributed_trace def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: Any) -> LROPoller[None]: @@ -947,13 +934,11 @@ def begin_delete(self, resource_group_name: str, cluster_name: str, **kwargs: An _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, @@ -971,7 +956,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -983,9 +968,11 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}" + } @distributed_trace def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Cluster"]: @@ -999,10 +986,8 @@ def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Cluster"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ClusterListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -1023,12 +1008,23 @@ def prepare_request(next_link=None): params=_params, ) request = 
_convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -1036,14 +1032,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1056,7 +1053,9 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters"} # type: ignore + list_by_subscription.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/clusters" + } @distributed_trace def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Cluster"]: @@ -1073,10 +1072,8 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ClusterListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ClusterListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -1098,12 +1095,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = 
self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -1111,14 +1119,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ClusterListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1131,7 +1140,9 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters"} # type: ignore + list_by_resource_group.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters" + } @distributed_trace def list_streaming_jobs( @@ -1152,10 +1163,8 @@ def list_streaming_jobs( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ClusterJobListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ClusterJobListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -1178,12 +1187,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -1191,14 +1211,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ClusterJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, 
stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1211,4 +1232,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_streaming_jobs.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs"} # type: ignore + list_streaming_jobs.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/listStreamingJobs" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py index 5eeb5de995fc..5d6809849e5f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_functions_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -31,10 +32,6 @@ from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -55,10 +52,8 @@ def build_create_or_replace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -75,7 +70,7 @@ def build_create_or_replace_request( "functionName": _SERIALIZER.url("function_name", function_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -104,10 +99,8 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -124,7 +117,7 @@ def build_update_request( "functionName": _SERIALIZER.url("function_name", function_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -145,9 +138,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -164,7 +155,7 @@ def build_delete_request( "functionName": _SERIALIZER.url("function_name", function_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -181,9 +172,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -200,7 +189,7 @@ def build_get_request( "functionName": _SERIALIZER.url("function_name", function_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -217,9 +206,7 @@ def build_list_by_streaming_job_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -235,7 +222,7 @@ def build_list_by_streaming_job_request( "jobName": _SERIALIZER.url("job_name", job_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters if select is not None: @@ -254,10 +241,8 @@ def build_test_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = 
_headers.pop("Accept", "application/json") # Construct URL @@ -274,7 +259,7 @@ def build_test_request( "functionName": _SERIALIZER.url("function_name", function_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -293,10 +278,8 @@ def build_retrieve_default_definition_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -313,7 +296,7 @@ def build_retrieve_default_definition_request( "functionName": _SERIALIZER.url("function_name", function_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -450,7 +433,7 @@ def create_or_replace( :param function_name: The name of the function. Required. :type function_name: str :param function: The definition of the function that will be used to create a new function or - replace the existing one under the streaming job. Is either a model type or a IO type. + replace the existing one under the streaming job. Is either a Function type or a IO type. Required. :type function: ~azure.mgmt.streamanalytics.models.Function or IO :param if_match: The ETag of the function. 
Omit this value to always overwrite the current @@ -480,16 +463,14 @@ def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Function] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Function] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(function, (IO, bytes)): + if isinstance(function, (IOBase, bytes)): _content = function else: _json = self._serialize.body(function, "Function") @@ -510,10 +491,11 @@ def create_or_replace( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -535,11 +517,13 @@ def create_or_replace( deserialized = self._deserialize("Function", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - create_or_replace.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}"} # type: ignore + create_or_replace.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" + } @overload def update( @@ -648,7 +632,7 @@ def update( corresponding properties in the existing function (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing function will remain the same and not change as a result of this PATCH operation. Is either a - model type or a IO type. Required. + Function type or a IO type. Required. :type function: ~azure.mgmt.streamanalytics.models.Function or IO :param if_match: The ETag of the function. Omit this value to always overwrite the current function. 
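[Editor's note — illustration only, not part of the diff.] The if_match parameter described here enables optimistic concurrency on function updates; a sketch with placeholder names, assuming the last-seen ETag is surfaced on the generated properties object:

    current = client.functions.get("my-rg", "my-job", "my-udf")
    updated = client.functions.update(
        resource_group_name="my-rg",
        job_name="my-job",
        function_name="my-udf",
        function=current,                  # PATCH semantics: null properties stay unchanged
        if_match=current.properties.etag,  # reject the write if the function changed meanwhile
    )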
Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -673,16 +657,14 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Function] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Function] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(function, (IO, bytes)): + if isinstance(function, (IOBase, bytes)): _content = function else: _json = self._serialize.body(function, "Function") @@ -702,10 +684,11 @@ def update( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -725,7 +708,9 @@ def update( return deserialized - update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -756,10 +741,8 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -772,10 +755,11 @@ def delete( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -788,7 +772,9 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {"url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}"} # type: ignore + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" + } @distributed_trace def get(self, resource_group_name: str, job_name: str, function_name: str, **kwargs: Any) -> _models.Function: @@ -817,10 +803,8 @@ def get(self, resource_group_name: str, job_name: str, function_name: str, **kwa _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Function] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Function] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -833,10 +817,11 @@ def get(self, resource_group_name: str, job_name: str, function_name: str, **kwa params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -856,7 +841,9 @@ def get(self, resource_group_name: str, job_name: str, function_name: str, **kwa return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}" + } @distributed_trace def list_by_streaming_job( @@ -882,10 +869,8 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.FunctionListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FunctionListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -909,12 +894,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -922,14 +918,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("FunctionListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -942,7 +939,9 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions"} # type: ignore + list_by_streaming_job.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions" + } def _test_initial( self, @@ -963,16 +962,14 @@ def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ResourceTestStatus]] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(function, (IO, bytes)): + if isinstance(function, (IOBase, bytes)): _content = function else: if function is not None: @@ -994,10 +991,11 @@ def _test_initial( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1016,7 +1014,9 @@ def _test_initial( return deserialized - _test_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test"} # type: ignore + _test_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test" + } @overload def begin_test( @@ -1134,8 
+1134,8 @@ def begin_test( the full function definition intended to be tested. If the function specified already exists, this parameter can be left null to test the existing function as is or if specified, the properties specified will overwrite the corresponding properties in the existing function - (exactly like a PATCH operation) and the resulting function will be tested. Is either a model - type or a IO type. Default value is None. + (exactly like a PATCH operation) and the resulting function will be tested. Is either a + Function type or a IO type. Default value is None. :type function: ~azure.mgmt.streamanalytics.models.Function or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. Default value is None. @@ -1156,16 +1156,14 @@ def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceTestStatus] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._test_initial( # type: ignore + raw_result = self._test_initial( resource_group_name=resource_group_name, job_name=job_name, function_name=function_name, @@ -1186,7 +1184,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -1198,9 +1196,11 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_test.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test"} # type: ignore + begin_test.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/test" + } @overload def retrieve_default_definition( @@ -1290,8 +1290,8 @@ def retrieve_default_definition( :param function_name: The name of the function. Required. :type function_name: str :param function_retrieve_default_definition_parameters: Parameters used to specify the type of - function to retrieve the default definition for. Is either a model type or a IO type. 
Default - value is None. + function to retrieve the default definition for. Is either a + FunctionRetrieveDefaultDefinitionParameters type or a IO type. Default value is None. :type function_retrieve_default_definition_parameters: ~azure.mgmt.streamanalytics.models.FunctionRetrieveDefaultDefinitionParameters or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. @@ -1313,16 +1313,14 @@ def retrieve_default_definition( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Function] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Function] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(function_retrieve_default_definition_parameters, (IO, bytes)): + if isinstance(function_retrieve_default_definition_parameters, (IOBase, bytes)): _content = function_retrieve_default_definition_parameters else: if function_retrieve_default_definition_parameters is not None: @@ -1346,10 +1344,11 @@ def retrieve_default_definition( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1366,4 +1365,6 @@ def retrieve_default_definition( return deserialized - retrieve_default_definition.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition"} # type: ignore + retrieve_default_definition.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/functions/{functionName}/retrieveDefaultDefinition" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py index a0c5432f4259..02154f8dae57 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_inputs_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -31,10 +32,6 @@ from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -55,10 +52,8 @@ def build_create_or_replace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -75,7 +70,7 @@ def build_create_or_replace_request( "inputName": _SERIALIZER.url("input_name", input_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -104,10 +99,8 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -124,7 +117,7 @@ def build_update_request( "inputName": _SERIALIZER.url("input_name", input_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -145,9 +138,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -164,7 +155,7 @@ def build_delete_request( "inputName": _SERIALIZER.url("input_name", input_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -181,9 +172,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -200,7 +189,7 @@ def build_get_request( "inputName": _SERIALIZER.url("input_name", input_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -217,9 +206,7 @@ def build_list_by_streaming_job_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -235,7 +222,7 @@ def build_list_by_streaming_job_request( "jobName": _SERIALIZER.url("job_name", job_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters if select is not None: @@ -254,10 +241,8 @@ def build_test_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -274,7 +259,7 @@ def build_test_request( "inputName": _SERIALIZER.url("input_name", input_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -411,7 +396,7 @@ def create_or_replace( :param input_name: The name of the input. Required. :type input_name: str :param input: The definition of the input that will be used to create a new input or replace - the existing one under the streaming job. Is either a model type or a IO type. Required. + the existing one under the streaming job. Is either a Input type or a IO type. Required. :type input: ~azure.mgmt.streamanalytics.models.Input or IO :param if_match: The ETag of the input. Omit this value to always overwrite the current input. Specify the last-seen ETag value to prevent accidentally overwriting concurrent changes. 
@@ -440,16 +425,14 @@ def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Input] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Input] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(input, (IO, bytes)): + if isinstance(input, (IOBase, bytes)): _content = input else: _json = self._serialize.body(input, "Input") @@ -470,10 +453,11 @@ def create_or_replace( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -495,11 +479,13 @@ def create_or_replace( deserialized = self._deserialize("Input", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - create_or_replace.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}"} # type: ignore + create_or_replace.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" + } @overload def update( @@ -607,7 +593,7 @@ def update( :param input: An Input object. The properties specified here will overwrite the corresponding properties in the existing input (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing input will - remain the same and not change as a result of this PATCH operation. Is either a model type or a + remain the same and not change as a result of this PATCH operation. Is either a Input type or a IO type. Required. :type input: ~azure.mgmt.streamanalytics.models.Input or IO :param if_match: The ETag of the input. Omit this value to always overwrite the current input. 
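The isinstance(input, (IO, bytes)) checks in these hunks now test against io.IOBase instead of typing.IO; concrete streams such as io.BytesIO subclass io.IOBase, whereas typing.IO is a static-typing construct they do not inherit from, which is presumably why the runtime branch was changed. A small self-contained sketch of the resulting body dispatch; the helper name and payload are illustrative, not generated code:

    import io

    def pick_body(input):  # hypothetical helper loosely mirroring the generated branch
        # Streams and raw bytes are sent as-is in _content; anything else is
        # treated as a model and would go through self._serialize.body(...).
        if isinstance(input, (io.IOBase, bytes)):
            return ("_content", input)
        return ("_json", {"note": "serialized model body"})

    print(pick_body(io.BytesIO(b'{"properties": {}}')))  # stream -> _content
    print(pick_body(b'{"properties": {}}'))              # bytes  -> _content
    print(pick_body({"properties": {}}))                 # dict   -> serialization path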
@@ -633,16 +619,14 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Input] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Input] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(input, (IO, bytes)): + if isinstance(input, (IOBase, bytes)): _content = input else: _json = self._serialize.body(input, "Input") @@ -662,10 +646,11 @@ def update( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -685,7 +670,9 @@ def update( return deserialized - update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -716,10 +703,8 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -732,10 +717,11 @@ def delete( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -748,7 +734,9 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}"} # type: ignore + delete.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" + } @distributed_trace def get(self, resource_group_name: str, job_name: str, input_name: str, **kwargs: Any) -> _models.Input: @@ -777,10 +765,8 @@ def get(self, resource_group_name: str, job_name: str, input_name: str, **kwargs _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Input] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Input] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -793,10 +779,11 @@ def get(self, resource_group_name: str, job_name: str, input_name: str, **kwargs params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -816,7 +803,9 @@ def get(self, resource_group_name: str, job_name: str, input_name: str, **kwargs return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}" + } @distributed_trace def list_by_streaming_job( @@ -842,10 +831,8 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.InputListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.InputListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -869,12 +856,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = 
self._client.format_url(request.url) request.method = "GET" return request @@ -882,14 +880,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("InputListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -902,7 +901,9 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs"} # type: ignore + list_by_streaming_job.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs" + } def _test_initial( self, @@ -923,16 +924,14 @@ def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ResourceTestStatus]] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(input, (IO, bytes)): + if isinstance(input, (IOBase, bytes)): _content = input else: if input is not None: @@ -954,10 +953,11 @@ def _test_initial( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -976,7 +976,9 @@ def _test_initial( return deserialized - _test_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test"} # type: ignore + _test_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test" + } @overload def begin_test( @@ -1089,7 +1091,7 @@ def begin_test( full input definition intended to be tested. 
If the input specified already exists, this parameter can be left null to test the existing input as is or if specified, the properties specified will overwrite the corresponding properties in the existing input (exactly like a - PATCH operation) and the resulting input will be tested. Is either a model type or a IO type. + PATCH operation) and the resulting input will be tested. Is either a Input type or a IO type. Default value is None. :type input: ~azure.mgmt.streamanalytics.models.Input or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. @@ -1111,16 +1113,14 @@ def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceTestStatus] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._test_initial( # type: ignore + raw_result = self._test_initial( resource_group_name=resource_group_name, job_name=job_name, input_name=input_name, @@ -1141,7 +1141,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -1153,6 +1153,8 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_test.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test"} # type: ignore + begin_test.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/inputs/{inputName}/test" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py index a17dc01f05a2..c93a95cea9b2 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_operations.py @@ -6,8 +6,8 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -29,10 +29,6 @@ from .._serialization import Serializer from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -44,9 +40,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -92,10 +86,8 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -115,12 +107,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -128,14 +131,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("OperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ 
-148,4 +152,4 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = {"url": "/providers/Microsoft.StreamAnalytics/operations"} # type: ignore + list.metadata = {"url": "/providers/Microsoft.StreamAnalytics/operations"} diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py index af9c15a6475d..af3c2c133341 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_outputs_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -31,10 +32,6 @@ from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -55,10 +52,8 @@ def build_create_or_replace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -75,7 +70,7 @@ def build_create_or_replace_request( "outputName": _SERIALIZER.url("output_name", output_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -104,10 +99,8 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -124,7 +117,7 @@ def build_update_request( "outputName": _SERIALIZER.url("output_name", output_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = 
_format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -145,9 +138,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -164,7 +155,7 @@ def build_delete_request( "outputName": _SERIALIZER.url("output_name", output_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -181,9 +172,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -200,7 +189,7 @@ def build_get_request( "outputName": _SERIALIZER.url("output_name", output_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -217,9 +206,7 @@ def build_list_by_streaming_job_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -235,7 +222,7 @@ def build_list_by_streaming_job_request( "jobName": _SERIALIZER.url("job_name", job_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters if select is not None: @@ -254,10 +241,8 @@ def build_test_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -274,7 +259,7 @@ def build_test_request( "outputName": _SERIALIZER.url("output_name", output_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -411,7 +396,7 @@ def create_or_replace( :param output_name: The name of the output. Required. :type output_name: str :param output: The definition of the output that will be used to create a new output or replace - the existing one under the streaming job. Is either a model type or a IO type. Required. + the existing one under the streaming job. Is either a Output type or a IO type. Required. :type output: ~azure.mgmt.streamanalytics.models.Output or IO :param if_match: The ETag of the output. Omit this value to always overwrite the current output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -440,16 +425,14 @@ def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Output] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Output] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(output, (IO, bytes)): + if isinstance(output, (IOBase, bytes)): _content = output else: _json = self._serialize.body(output, "Output") @@ -470,10 +453,11 @@ def create_or_replace( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -495,11 +479,13 @@ def create_or_replace( deserialized = self._deserialize("Output", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - create_or_replace.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}"} # type: ignore + create_or_replace.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" + } @overload def update( @@ -607,8 +593,8 @@ def update( :param output: An Output object. The properties specified here will overwrite the corresponding properties in the existing output (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing output will - remain the same and not change as a result of this PATCH operation. Is either a model type or a - IO type. Required. + remain the same and not change as a result of this PATCH operation. Is either a Output type or + a IO type. Required. 
:type output: ~azure.mgmt.streamanalytics.models.Output or IO :param if_match: The ETag of the output. Omit this value to always overwrite the current output. Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -633,16 +619,14 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Output] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Output] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(output, (IO, bytes)): + if isinstance(output, (IOBase, bytes)): _content = output else: _json = self._serialize.body(output, "Output") @@ -662,10 +646,11 @@ def update( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -685,7 +670,9 @@ def update( return deserialized - update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements @@ -716,10 +703,8 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -732,10 +717,11 @@ def delete( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -748,7 +734,9 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return 
cls(pipeline_response, None, {}) - delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}"} # type: ignore + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" + } @distributed_trace def get(self, resource_group_name: str, job_name: str, output_name: str, **kwargs: Any) -> _models.Output: @@ -777,10 +765,8 @@ def get(self, resource_group_name: str, job_name: str, output_name: str, **kwarg _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Output] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Output] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -793,10 +779,11 @@ def get(self, resource_group_name: str, job_name: str, output_name: str, **kwarg params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -816,7 +803,9 @@ def get(self, resource_group_name: str, job_name: str, output_name: str, **kwarg return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}" + } @distributed_trace def list_by_streaming_job( @@ -842,10 +831,8 @@ def list_by_streaming_job( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.OutputListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OutputListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -869,12 +856,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + 
request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -882,14 +880,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("OutputListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -902,7 +901,9 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_streaming_job.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs"} # type: ignore + list_by_streaming_job.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs" + } def _test_initial( self, @@ -923,16 +924,14 @@ def _test_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.ResourceTestStatus]] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ResourceTestStatus]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(output, (IO, bytes)): + if isinstance(output, (IOBase, bytes)): _content = output else: if output is not None: @@ -954,10 +953,11 @@ def _test_initial( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -976,7 +976,9 @@ def _test_initial( return deserialized - _test_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test"} # type: ignore + _test_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test" + } @overload def begin_test( @@ 
-1089,7 +1091,7 @@ def begin_test( full output definition intended to be tested. If the output specified already exists, this parameter can be left null to test the existing output as is or if specified, the properties specified will overwrite the corresponding properties in the existing output (exactly like a - PATCH operation) and the resulting output will be tested. Is either a model type or a IO type. + PATCH operation) and the resulting output will be tested. Is either an Output type or an IO type. Default value is None. :type output: ~azure.mgmt.streamanalytics.models.Output or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. @@ -1111,16 +1113,14 @@ def begin_test( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.ResourceTestStatus] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ResourceTestStatus] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._test_initial( # type: ignore + raw_result = self._test_initial( resource_group_name=resource_group_name, job_name=job_name, output_name=output_name, @@ -1141,7 +1141,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -1153,6 +1153,8 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_test.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test"} # type: ignore + begin_test.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/outputs/{outputName}/test" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py index 597133ec8c76..a0e16d172299 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_private_endpoints_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -31,10 +32,6 @@ from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -55,10 +52,8 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -75,7 +70,7 @@ def build_create_or_update_request( "privateEndpointName": _SERIALIZER.url("private_endpoint_name", private_endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -98,9 +93,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -117,7 +110,7 @@ def build_get_request( "privateEndpointName": _SERIALIZER.url("private_endpoint_name", private_endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -134,9 +127,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -153,7 +144,7 @@ def build_delete_request( "privateEndpointName": _SERIALIZER.url("private_endpoint_name", private_endpoint_name, "str"), } - _url = 
_format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -170,9 +161,7 @@ def build_list_by_cluster_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -188,7 +177,7 @@ def build_list_by_cluster_request( "clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -323,7 +312,8 @@ def create_or_update( :param private_endpoint_name: The name of the private endpoint. Required. :type private_endpoint_name: str :param private_endpoint: The definition of the private endpoint that will be used to create a - new cluster or replace the existing one. Is either a model type or a IO type. Required. + new cluster or replace the existing one. Is either a PrivateEndpoint type or an IO type. + Required. :type private_endpoint: ~azure.mgmt.streamanalytics.models.PrivateEndpoint or IO :param if_match: The ETag of the resource. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -352,16 +342,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpoint] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PrivateEndpoint] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(private_endpoint, (IO, bytes)): + if isinstance(private_endpoint, (IOBase, bytes)): _content = private_endpoint else: _json = self._serialize.body(private_endpoint, "PrivateEndpoint") @@ -382,10 +370,11 @@ def create_or_update( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -402,11 +391,13 @@ def create_or_update( deserialized = self._deserialize("PrivateEndpoint", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return 
cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized + return deserialized # type: ignore - create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}"} # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" + } @distributed_trace def get( @@ -437,10 +428,8 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpoint] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateEndpoint] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -453,10 +442,11 @@ def get( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -473,7 +463,9 @@ def get( return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" + } def _delete_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, cluster_name: str, private_endpoint_name: str, **kwargs: Any @@ -489,10 +481,8 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -505,10 +495,11 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -521,7 +512,9 @@ 
def _delete_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}"} # type: ignore + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" + } @distributed_trace def begin_delete( @@ -551,13 +544,11 @@ def begin_delete( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, @@ -576,7 +567,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -588,9 +579,11 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints/{privateEndpointName}" + } @distributed_trace def list_by_cluster( @@ -611,10 +604,8 @@ def list_by_cluster( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2020-03-01-preview") - ) # type: Literal["2020-03-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateEndpointListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -637,12 +628,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # 
type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -650,14 +652,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("PrivateEndpointListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -670,4 +673,6 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_cluster.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints"} # type: ignore + list_by_cluster.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/clusters/{clusterName}/privateEndpoints" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_sku_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_sku_operations.py deleted file mode 100644 index 51242ec51e4d..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_sku_operations.py +++ /dev/null @@ -1,175 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models as _models -from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section - -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_list_request(resource_group_name: str, job_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/skus", - ) # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - "resourceGroupName": _SERIALIZER.url( - "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" - ), - "jobName": _SERIALIZER.url("job_name", job_name, "str"), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class SkuOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.mgmt.streamanalytics.StreamAnalyticsManagementClient`'s - :attr:`sku` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, resource_group_name: str, job_name: str, **kwargs: Any - ) -> Iterable["_models.GetStreamingJobSkuResult"]: - """Gets a list of available SKUs about the specified streaming job. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param job_name: The name of the streaming job. Required. 
- :type job_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either GetStreamingJobSkuResult or the result of - cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.streamanalytics.models.GetStreamingJobSkuResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.GetStreamingJobSkuResults] - - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_request( - resource_group_name=resource_group_name, - job_name=job_name, - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=self.list.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - - else: - request = HttpRequest("GET", next_link) - request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - request.method = "GET" - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize("GetStreamingJobSkuResults", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - list.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/skus"} # type: ignore diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py index 855655896480..3b76560f23c2 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_streaming_jobs_operations.py @@ -6,8 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.exceptions import ( ClientAuthenticationError, @@ -31,10 +32,6 @@ from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -54,10 +51,8 @@ def build_create_or_replace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -73,7 +68,7 @@ def build_create_or_replace_request( "jobName": _SERIALIZER.url("job_name", job_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -96,10 +91,8 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -115,7 +108,7 @@ def build_update_request( "jobName": _SERIALIZER.url("job_name", job_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -134,9 +127,7 @@ def build_delete_request(resource_group_name: str, job_name: str, subscription_i _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -152,7 +143,7 @@ def build_delete_request(resource_group_name: str, job_name: str, subscription_i "jobName": _SERIALIZER.url("job_name", job_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = 
_format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -169,9 +160,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -187,7 +176,7 @@ def build_get_request( "jobName": _SERIALIZER.url("job_name", job_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters if expand is not None: @@ -206,9 +195,7 @@ def build_list_by_resource_group_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -223,7 +210,7 @@ def build_list_by_resource_group_request( ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters if expand is not None: @@ -240,9 +227,7 @@ def build_list_request(subscription_id: str, *, expand: Optional[str] = None, ** _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -253,7 +238,7 @@ def build_list_request(subscription_id: str, *, expand: Optional[str] = None, ** "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters if expand is not None: @@ -270,10 +255,8 @@ def build_start_request(resource_group_name: str, job_name: str, subscription_id _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -289,7 +272,7 @@ def build_start_request(resource_group_name: str, job_name: str, subscription_id "jobName": _SERIALIZER.url("job_name", job_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, 
**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -306,9 +289,7 @@ def build_stop_request(resource_group_name: str, job_name: str, subscription_id: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -324,7 +305,7 @@ def build_stop_request(resource_group_name: str, job_name: str, subscription_id: "jobName": _SERIALIZER.url("job_name", job_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -339,10 +320,8 @@ def build_scale_request(resource_group_name: str, job_name: str, subscription_id _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -358,7 +337,7 @@ def build_scale_request(resource_group_name: str, job_name: str, subscription_id "jobName": _SERIALIZER.url("job_name", job_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -410,16 +389,14 @@ def _create_or_replace_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJob] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(streaming_job, (IO, bytes)): + if isinstance(streaming_job, (IOBase, bytes)): _content = streaming_job else: _json = self._serialize.body(streaming_job, "StreamingJob") @@ -439,10 +416,11 @@ def _create_or_replace_initial( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, 
stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -464,11 +442,13 @@ def _create_or_replace_initial( deserialized = self._deserialize("StreamingJob", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_replace_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + _create_or_replace_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } @overload def begin_create_or_replace( @@ -582,7 +562,8 @@ def begin_create_or_replace( :param job_name: The name of the streaming job. Required. :type job_name: str :param streaming_job: The definition of the streaming job that will be used to create a new - streaming job or replace the existing one. Is either a model type or a IO type. Required. + streaming job or replace the existing one. Is either a StreamingJob type or an IO type. + Required. :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -611,16 +592,14 @@ def begin_create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJob] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._create_or_replace_initial( # type: ignore + raw_result = self._create_or_replace_initial( resource_group_name=resource_group_name, job_name=job_name, streaming_job=streaming_job, @@ -646,7 +625,7 @@ def get_long_running_output(pipeline_response): return deserialized if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -658,9 +637,11 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, 
get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_replace.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + begin_create_or_replace.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } @overload def update( @@ -759,7 +740,7 @@ def update( corresponding properties in the existing streaming job (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing input will remain the same and not change as a result of this PATCH operation. Is - either a model type or a IO type. Required. + either a StreamingJob type or an IO type. Required. :type streaming_job: ~azure.mgmt.streamanalytics.models.StreamingJob or IO :param if_match: The ETag of the streaming job. Omit this value to always overwrite the current record set. Specify the last-seen ETag value to prevent accidentally overwriting concurrent @@ -784,16 +765,14 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJob] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(streaming_job, (IO, bytes)): + if isinstance(streaming_job, (IOBase, bytes)): _content = streaming_job else: _json = self._serialize.body(streaming_job, "StreamingJob") @@ -812,10 +791,11 @@ def update( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -835,7 +815,9 @@ def update( return deserialized - update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } def _delete_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, job_name: str, **kwargs: Any @@ -851,10 +833,8 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", 
"2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( resource_group_name=resource_group_name, @@ -866,10 +846,11 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -882,7 +863,9 @@ def _delete_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } @distributed_trace def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: Any) -> LROPoller[None]: @@ -908,13 +891,11 @@ def begin_delete(self, resource_group_name: str, job_name: str, **kwargs: Any) - _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, @@ -932,7 +913,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -944,9 +925,11 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + 
begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } @distributed_trace def get( @@ -980,10 +963,8 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJob] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.StreamingJob] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -996,10 +977,11 @@ def get( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1019,7 +1001,9 @@ def get( return deserialized - get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}" + } @distributed_trace def list_by_resource_group( @@ -1043,10 +1027,8 @@ def list_by_resource_group( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJobListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.StreamingJobListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -1069,12 +1051,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -1082,14 +1075,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("StreamingJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # 
type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1102,7 +1096,9 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs"} # type: ignore + list_by_resource_group.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs" + } @distributed_trace def list(self, expand: Optional[str] = None, **kwargs: Any) -> Iterable["_models.StreamingJob"]: @@ -1121,10 +1117,8 @@ def list(self, expand: Optional[str] = None, **kwargs: Any) -> Iterable["_models _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.StreamingJobListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.StreamingJobListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, @@ -1146,12 +1140,23 @@ def prepare_request(next_link=None): params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) else: - request = HttpRequest("GET", next_link) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) request.method = "GET" return request @@ -1159,14 +1164,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("StreamingJobListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1179,7 +1185,7 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) - list.metadata = {"url": 
"/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs"} # type: ignore + list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/streamingjobs"} def _start_initial( # pylint: disable=inconsistent-return-statements self, @@ -1199,16 +1205,14 @@ def _start_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(start_job_parameters, (IO, bytes)): + if isinstance(start_job_parameters, (IOBase, bytes)): _content = start_job_parameters else: if start_job_parameters is not None: @@ -1229,10 +1233,11 @@ def _start_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1245,7 +1250,9 @@ def _start_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start"} # type: ignore + _start_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start" + } @overload def begin_start( @@ -1338,7 +1345,7 @@ def begin_start( :param job_name: The name of the streaming job. Required. :type job_name: str :param start_job_parameters: Parameters applicable to a start streaming job operation. Is - either a model type or a IO type. Default value is None. + either a StartStreamingJobParameters type or a IO type. Default value is None. :type start_job_parameters: ~azure.mgmt.streamanalytics.models.StartStreamingJobParameters or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. 
@@ -1359,14 +1366,12 @@ def begin_start( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._start_initial( # type: ignore resource_group_name=resource_group_name, @@ -1386,7 +1391,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -1398,9 +1403,11 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start"} # type: ignore + begin_start.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/start" + } def _stop_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, job_name: str, **kwargs: Any @@ -1416,10 +1423,8 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) request = build_stop_request( resource_group_name=resource_group_name, @@ -1431,10 +1436,11 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # 
pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1447,7 +1453,9 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _stop_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop"} # type: ignore + _stop_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop" + } @distributed_trace def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: Any) -> LROPoller[None]: @@ -1474,13 +1482,11 @@ def begin_stop(self, resource_group_name: str, job_name: str, **kwargs: Any) -> _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._stop_initial( # type: ignore resource_group_name=resource_group_name, @@ -1498,7 +1504,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -1510,9 +1516,11 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop"} # type: ignore + begin_stop.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/stop" + } def _scale_initial( # pylint: disable=inconsistent-return-statements self, @@ -1532,16 +1540,14 @@ def _scale_initial( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[None] + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(scale_job_parameters, (IO, bytes)): + if isinstance(scale_job_parameters, (IOBase, bytes)): _content = scale_job_parameters else: if scale_job_parameters is not None: @@ -1562,10 +1568,11 @@ def _scale_initial( # pylint: disable=inconsistent-return-statements params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1578,7 +1585,9 @@ def _scale_initial( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - _scale_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale"} # type: ignore + _scale_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale" + } @overload def begin_scale( @@ -1668,7 +1677,7 @@ def begin_scale( :param job_name: The name of the streaming job. Required. :type job_name: str :param scale_job_parameters: Parameters applicable to a scale streaming job operation. Is - either a model type or a IO type. Default value is None. + either a ScaleStreamingJobParameters type or a IO type. Default value is None. :type scale_job_parameters: ~azure.mgmt.streamanalytics.models.ScaleStreamingJobParameters or IO :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. 
@@ -1689,14 +1698,12 @@ def begin_scale( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[None] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._scale_initial( # type: ignore resource_group_name=resource_group_name, @@ -1716,7 +1723,7 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return cls(pipeline_response, None, {}) if polling is True: - polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: @@ -1728,6 +1735,8 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_scale.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale"} # type: ignore + begin_scale.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/scale" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py index ef1ea6902023..bb676ee0638e 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_subscriptions_operations.py @@ -6,8 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, Optional, TypeVar from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,21 +18,15 @@ ) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,9 +38,7 @@ def build_list_quotas_request(location: str, subscription_id: str, **kwargs: Any _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -60,7 +51,7 @@ def build_list_quotas_request(location: str, subscription_id: str, **kwargs: Any "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -71,171 +62,6 @@ def build_list_quotas_request(location: str, subscription_id: str, **kwargs: Any return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_test_query_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery", - ) # pylint: disable=line-too-long - path_format_arguments = { - "location": _SERIALIZER.url("location", location, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = 
_SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_compile_query_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/compileQuery", - ) # pylint: disable=line-too-long - path_format_arguments = { - "location": _SERIALIZER.url("location", location, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_sample_input_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput", - ) # pylint: disable=line-too-long - path_format_arguments = { - "location": _SERIALIZER.url("location", location, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_test_input_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = 
kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput", - ) # pylint: disable=line-too-long - path_format_arguments = { - "location": _SERIALIZER.url("location", location, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_test_output_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop( - "template_url", - "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput", - ) # pylint: disable=line-too-long - path_format_arguments = { - "location": _SERIALIZER.url("location", location, "str"), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - class SubscriptionsOperations: """ .. warning:: @@ -259,8 +85,9 @@ def __init__(self, *args, **kwargs): def list_quotas(self, location: str, **kwargs: Any) -> _models.SubscriptionQuotasListResult: """Retrieves the subscription's current quota information in a particular region. - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. + :param location: The region in which to retrieve the subscription's quota information. You can + find out which regions Azure Stream Analytics is supported in here: + https://azure.microsoft.com/en-us/regions/. Required. 
:type location: str :keyword callable cls: A custom type or function that will be passed the direct response :return: SubscriptionQuotasListResult or the result of cls(response) @@ -278,10 +105,8 @@ def list_quotas(self, location: str, **kwargs: Any) -> _models.SubscriptionQuota _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.SubscriptionQuotasListResult] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SubscriptionQuotasListResult] = kwargs.pop("cls", None) request = build_list_quotas_request( location=location, @@ -292,10 +117,11 @@ def list_quotas(self, location: str, **kwargs: Any) -> _models.SubscriptionQuota params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -312,920 +138,6 @@ def list_quotas(self, location: str, **kwargs: Any) -> _models.SubscriptionQuota return deserialized - list_quotas.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas"} # type: ignore - - def _test_query_initial( - self, location: str, test_query: Union[_models.TestQuery, IO], **kwargs: Any - ) -> Optional[_models.QueryTestingResult]: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.QueryTestingResult]] - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_query, (IO, bytes)): - _content = test_query - else: - _json = self._serialize.body(test_query, "TestQuery") - - request = build_test_query_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_query_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, 
pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("QueryTestingResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_query_initial.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery"} # type: ignore - - @overload - def begin_test_query( - self, location: str, test_query: _models.TestQuery, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Required. - :type test_query: ~azure.mgmt.streamanalytics.models.TestQuery - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_test_query( - self, location: str, test_query: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Required. - :type test_query: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_test_query( - self, location: str, test_query: Union[_models.TestQuery, IO], **kwargs: Any - ) -> LROPoller[_models.QueryTestingResult]: - """Test the Stream Analytics query on a sample input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_query: The query testing object that defines the input, output, and transformation - for the query testing. Is either a model type or a IO type. Required. - :type test_query: ~azure.mgmt.streamanalytics.models.TestQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either QueryTestingResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.QueryTestingResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.QueryTestingResult] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = self._test_query_initial( # type: ignore - location=location, - test_query=test_query, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("QueryTestingResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) # type: PollingMethod - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - 
deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - begin_test_query.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testQuery"} # type: ignore - - @overload - def compile_query( - self, - location: str, - compile_query: _models.CompileQuery, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Required. - :type compile_query: ~azure.mgmt.streamanalytics.models.CompileQuery - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def compile_query( - self, location: str, compile_query: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Required. - :type compile_query: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def compile_query( - self, location: str, compile_query: Union[_models.CompileQuery, IO], **kwargs: Any - ) -> _models.QueryCompilationResult: - """Compile the Stream Analytics query. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param compile_query: The query compilation object which defines the input, output, and - transformation for the query compilation. Is either a model type or a IO type. Required. - :type compile_query: ~azure.mgmt.streamanalytics.models.CompileQuery or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: QueryCompilationResult or the result of cls(response) - :rtype: ~azure.mgmt.streamanalytics.models.QueryCompilationResult - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.QueryCompilationResult] - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(compile_query, (IO, bytes)): - _content = compile_query - else: - _json = self._serialize.body(compile_query, "CompileQuery") - - request = build_compile_query_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self.compile_query.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("QueryCompilationResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - compile_query.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/compileQuery"} # type: ignore - - def _sample_input_initial( - self, location: str, sample_input: Union[_models.SampleInput, IO], **kwargs: Any - ) -> _models.SampleInputResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.SampleInputResult] - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(sample_input, (IO, bytes)): - _content = sample_input - else: - _json = self._serialize.body(sample_input, "SampleInput") - - request = build_sample_input_request( - location=location, - subscription_id=self._config.subscription_id, - 
api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._sample_input_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore - - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("SampleInputResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _sample_input_initial.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput"} # type: ignore - - @overload - def begin_sample_input( - self, location: str, sample_input: _models.SampleInput, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Required. - :type sample_input: ~azure.mgmt.streamanalytics.models.SampleInput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_sample_input( - self, location: str, sample_input: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Required. - :type sample_input: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_sample_input( - self, location: str, sample_input: Union[_models.SampleInput, IO], **kwargs: Any - ) -> LROPoller[_models.SampleInputResult]: - """Sample the Stream Analytics input data. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param sample_input: Defines the necessary parameters for sampling the Stream Analytics input - data. Is either a model type or a IO type. Required. - :type sample_input: ~azure.mgmt.streamanalytics.models.SampleInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either SampleInputResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.SampleInputResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.SampleInputResult] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = self._sample_input_initial( # type: ignore - location=location, - sample_input=sample_input, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SampleInputResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) # type: PollingMethod - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - begin_sample_input.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/sampleInput"} # type: ignore - - def _test_input_initial( - self, location: str, test_input: Union[_models.TestInput, IO], **kwargs: Any - ) -> _models.TestDatasourceResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.TestDatasourceResult] - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_input, (IO, bytes)): - _content = test_input - else: - _json = self._serialize.body(test_input, "TestInput") - - request = build_test_input_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_input_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = 
self._client.format_url(request.url) # type: ignore - - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_input_initial.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput"} # type: ignore - - @overload - def begin_test_input( - self, location: str, test_input: _models.TestInput, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. - Required. - :type test_input: ~azure.mgmt.streamanalytics.models.TestInput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_test_input( - self, location: str, test_input: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. - Required. - :type test_input: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_test_input( - self, location: str, test_input: Union[_models.TestInput, IO], **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics input. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_input: Defines the necessary parameters for testing the Stream Analytics input. Is - either a model type or a IO type. Required. - :type test_input: ~azure.mgmt.streamanalytics.models.TestInput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.TestDatasourceResult] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = self._test_input_initial( # type: ignore - location=location, - test_input=test_input, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) # type: PollingMethod - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - begin_test_input.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testInput"} # type: ignore - - def _test_output_initial( - self, location: str, test_output: Union[_models.TestOutput, IO], **kwargs: Any - ) -> _models.TestDatasourceResult: - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.TestDatasourceResult] - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(test_output, (IO, bytes)): - _content = test_output - else: - _json = self._serialize.body(test_output, "TestOutput") - - request = build_test_output_request( - location=location, - subscription_id=self._config.subscription_id, - api_version=api_version, - content_type=content_type, - json=_json, - content=_content, - template_url=self._test_output_initial.metadata["url"], - headers=_headers, - params=_params, - ) - request = _convert_request(request) - request.url = 
self._client.format_url(request.url) # type: ignore - - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - _test_output_initial.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput"} # type: ignore - - @overload - def begin_test_output( - self, location: str, test_output: _models.TestOutput, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Required. - :type test_output: ~azure.mgmt.streamanalytics.models.TestOutput - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def begin_test_output( - self, location: str, test_output: IO, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Required. - :type test_output: IO - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def begin_test_output( - self, location: str, test_output: Union[_models.TestOutput, IO], **kwargs: Any - ) -> LROPoller[_models.TestDatasourceResult]: - """Test the Stream Analytics output. - - :param location: The region to which the request is sent. You can find out which regions Azure - Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/. Required. - :type location: str - :param test_output: Defines the necessary parameters for testing the Stream Analytics output. - Is either a model type or a IO type. Required. - :type test_output: ~azure.mgmt.streamanalytics.models.TestOutput or IO - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this - operation to not poll, or pass in your own initialized polling object for a personal polling - strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. 
- :return: An instance of LROPoller that returns either TestDatasourceResult or the result of - cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.streamanalytics.models.TestDatasourceResult] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.TestDatasourceResult] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = self._test_output_initial( # type: ignore - location=location, - test_output=test_output, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - headers=_headers, - params=_params, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TestDatasourceResult", pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: - polling_method = cast( - PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) - ) # type: PollingMethod - elif polling is False: - polling_method = cast(PollingMethod, NoPolling()) - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - begin_test_output.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/testOutput"} # type: ignore + list_quotas.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py index a573abf6ecc9..4b0bcccc20cf 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/azure/mgmt/streamanalytics/operations/_transformations_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
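The begin_test_input/begin_test_output docstrings above describe the LRO surface shared by every begin_* method in this package: ARM polling by default, polling=False to return immediately, a custom PollingMethod, and continuation_token to resume a saved poller. Below is a minimal caller-side sketch of that pattern — not code from this diff — using the clusters.begin_delete operation that appears in the samples later in this change; the subscription ID and resource names are placeholders, and pinning api_version on the client relies on the api_version keyword this regeneration introduces.

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

# Placeholder identifiers; api_version pinning assumes the keyword added in this diff.
client = StreamAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    api_version="2020-03-01",
)

# Default behavior: ARM polling until the operation reaches a terminal state.
poller = client.clusters.begin_delete(resource_group_name="sjrg", cluster_name="testcluster")

# Checkpoint the poller and resume it later from the saved state.
token = poller.continuation_token()
resumed = client.clusters.begin_delete(
    resource_group_name="sjrg",
    cluster_name="testcluster",
    continuation_token=token,
)
resumed.result()  # blocks until done; polling=False would skip polling entirely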
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -28,10 +28,6 @@ from .._serialization import Serializer from .._vendor import _convert_request, _format_url_section -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -52,10 +48,8 @@ def build_create_or_replace_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -72,7 +66,7 @@ def build_create_or_replace_request( "transformationName": _SERIALIZER.url("transformation_name", transformation_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -101,10 +95,8 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -121,7 +113,7 @@ def build_update_request( "transformationName": _SERIALIZER.url("transformation_name", transformation_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -142,9 +134,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-03-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -161,7 +151,7 @@ def build_get_request( "transformationName": _SERIALIZER.url("transformation_name", transformation_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore # Construct parameters 
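# (Illustrative aside, not part of the generated code.) The query construction
# below is where the resolved api-version reaches the wire. In these builders the
# precedence is: an explicit api_version kwarg, then a pre-seeded "api-version"
# query parameter, then the default (self._config.api_version in the operation
# methods further down). A hypothetical per-call override, assuming the
# conventional client.transformations accessor and placeholder names:
#
#   client.transformations.get(
#       resource_group_name="<resource-group>",
#       job_name="<job-name>",
#       transformation_name="<transformation-name>",
#       api_version="2020-03-01",  # wins over the client-level default
#   )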
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -299,8 +289,8 @@ def create_or_replace( :param transformation_name: The name of the transformation. Required. :type transformation_name: str :param transformation: The definition of the transformation that will be used to create a new - transformation or replace the existing one under the streaming job. Is either a model type or a - IO type. Required. + transformation or replace the existing one under the streaming job. Is either a Transformation + type or a IO type. Required. :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO :param if_match: The ETag of the transformation. Omit this value to always overwrite the current transformation. Specify the last-seen ETag value to prevent accidentally overwriting @@ -329,16 +319,14 @@ def create_or_replace( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Transformation] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Transformation] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(transformation, (IO, bytes)): + if isinstance(transformation, (IOBase, bytes)): _content = transformation else: _json = self._serialize.body(transformation, "Transformation") @@ -359,10 +347,11 @@ def create_or_replace( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -384,11 +373,13 @@ def create_or_replace( deserialized = self._deserialize("Transformation", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - create_or_replace.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}"} # type: ignore + create_or_replace.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}" + } @overload def update( @@ -499,7 +490,7 @@ def update( the corresponding properties in the existing transformation (ie. Those properties will be updated). Any properties that are set to null here will mean that the corresponding property in the existing transformation will remain the same and not change as a result of this PATCH - operation. Is either a model type or a IO type. Required. 
+ operation. Is either a Transformation type or a IO type. Required. :type transformation: ~azure.mgmt.streamanalytics.models.Transformation or IO :param if_match: The ETag of the transformation. Omit this value to always overwrite the current transformation. Specify the last-seen ETag value to prevent accidentally overwriting @@ -524,16 +515,14 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Transformation] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Transformation] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(transformation, (IO, bytes)): + if isinstance(transformation, (IOBase, bytes)): _content = transformation else: _json = self._serialize.body(transformation, "Transformation") @@ -553,10 +542,11 @@ def update( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -576,7 +566,9 @@ def update( return deserialized - update.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}" + } @distributed_trace def get( @@ -607,10 +599,8 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop( - "api_version", _params.pop("api-version", "2021-10-01-preview") - ) # type: Literal["2021-10-01-preview"] - cls = kwargs.pop("cls", None) # type: ClsType[_models.Transformation] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Transformation] = kwargs.pop("cls", None) request = build_get_request( resource_group_name=resource_group_name, @@ -623,10 +613,11 @@ def get( params=_params, ) request = _convert_request(request) - request.url = self._client.format_url(request.url) # type: ignore + request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access - request, stream=False, **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -646,4 +637,6 @@ def get( return deserialized - 
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.StreamAnalytics/streamingjobs/{jobName}/transformations/{transformationName}" + } diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_new_cluster.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_create.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_new_cluster.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_create.py index d8f57f69eec3..04a1a4733455 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_new_cluster.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_create.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_new_cluster.py + python cluster_create.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_Create.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_Create.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_cluster.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_delete.py similarity index 88% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_cluster.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_delete.py index b6f6ed11987e..ed49b753726e 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_cluster.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_delete.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python delete_a_cluster.py + python cluster_delete.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -29,13 +29,12 @@ def main(): subscription_id="34adfa4f-cedf-4dc0-ba29-b6d1a69ab345", ) - response = client.clusters.begin_delete( + client.clusters.begin_delete( resource_group_name="sjrg", cluster_name="testcluster", ).result() - print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_Delete.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_cluster.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_get.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_cluster.py rename to 
sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_get.py index 8076f87718ac..3e028ca7195c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_cluster.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_get.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_cluster.py + python cluster_get.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_Get.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_Get.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_clusters_in_resource_group.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_resource_group.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_clusters_in_resource_group.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_resource_group.py index 283336ee1328..0e656e02658c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_clusters_in_resource_group.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_resource_group.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python list_clusters_in_resource_group.py + python cluster_list_by_resource_group.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -36,6 +36,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_ListByResourceGroup.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_ListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_the_clusters_in_a_subscription.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_subscription.py similarity index 90% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_the_clusters_in_a_subscription.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_subscription.py index 8bc8b1d4ebc0..c059690b47e3 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_the_clusters_in_a_subscription.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_by_subscription.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python list_the_clusters_in_a_subscription.py + python cluster_list_by_subscription.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -34,6 +34,6 @@ def main(): print(item) -# 
x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_ListBySubscription.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_ListBySubscription.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_cluster.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_streaming_jobs.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_cluster.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_streaming_jobs.py index 6a2d7e8a16f2..41ff70bebdfc 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_cluster.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_list_streaming_jobs.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python list_all_streaming_jobs_in_cluster.py + python cluster_list_streaming_jobs.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_ListStreamingJobs.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_ListStreamingJobs.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_cluster.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_update.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_cluster.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_update.py index f63c47e81b02..f9df0579c29b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_cluster.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/cluster_update.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_a_cluster.py + python cluster_update.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/Cluster_Update.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_Update.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/compile_the_stream_analytics_query.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/compile_the_stream_analytics_query.py deleted file mode 100644 index 034a0a12281c..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/compile_the_stream_analytics_query.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python compile_the_stream_analytics_query.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.subscriptions.compile_query( - location="West US", - compile_query={ - "compatibilityLevel": "1.2", - "functions": [ - { - "bindingType": "Microsoft.StreamAnalytics/JavascriptUdf", - "inputs": [{"dataType": "any", "isConfigurationParameter": None}], - "name": "function1", - "output": {"dataType": "bigint"}, - "type": "Scalar", - } - ], - "inputs": [{"name": "input1", "type": "Stream"}], - "jobType": "Cloud", - "query": "SELECT\r\n *\r\nINTO\r\n [output1]\r\nFROM\r\n [input1]", - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_CompileQuery.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_clr_udf_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_clr_udf_function.py deleted file mode 100644 index 99b77830feb9..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_clr_udf_function.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python create_a_clr_udf_function.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
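A brief aside on the credential setup these sample docstrings keep repeating: they assume a service principal exposed through environment variables, which DefaultAzureCredential resolves via its EnvironmentCredential step (other mechanisms, such as Azure CLI login or managed identity, also work without them). A minimal sketch with placeholder values — in practice, set these outside the process, e.g. in the shell or CI secrets:

import os
from azure.identity import DefaultAzureCredential

os.environ["AZURE_CLIENT_ID"] = "<app-client-id>"      # placeholder
os.environ["AZURE_TENANT_ID"] = "<tenant-id>"          # placeholder
os.environ["AZURE_CLIENT_SECRET"] = "<client-secret>"  # placeholder

credential = DefaultAzureCredential()  # EnvironmentCredential is tried first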
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.functions.create_or_replace( - resource_group_name="sjrg", - job_name="sjName", - function_name="function588", - function={ - "properties": { - "properties": { - "binding": { - "properties": { - "class": "ASAEdgeUDFDemo.Class1", - "dllPath": "ASAEdgeApplication2_CodeBehind", - "method": "SquareFunction", - "updateMode": "Static", - }, - "type": "Microsoft.StreamAnalytics/CLRUdf", - }, - "inputs": [{"dataType": "nvarchar(max)"}], - "output": {"dataType": "nvarchar(max)"}, - }, - "type": "Scalar", - } - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Create_CSharp.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_postgre_sql_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_postgre_sql_output.py deleted file mode 100644 index 98729a24a79e..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_postgre_sql_output.py +++ /dev/null @@ -1,58 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python create_a_postgre_sql_output.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.outputs.create_or_replace( - resource_group_name="sjrg7983", - job_name="sj2331", - output_name="output3022", - output={ - "properties": { - "datasource": { - "properties": { - "authenticationMode": "Msi", - "database": "someDatabase", - "maxWriterCount": 1, - "password": "somePassword", - "server": "someServer", - "table": "someTable", - "user": "user", - }, - "type": "Microsoft.DBForPostgreSQL/servers/databases", - } - } - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_PostgreSQL.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_ml_service_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_ml_service_function.py deleted file mode 100644 index 85e45d139c52..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_ml_service_function.py +++ /dev/null @@ -1,64 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python create_an_azure_ml_service_function.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.functions.create_or_replace( - resource_group_name="sjrg", - job_name="sjName", - function_name="function588", - function={ - "properties": { - "properties": { - "binding": { - "properties": { - "apiKey": "someApiKey==", - "batchSize": 1000, - "endpoint": "someAzureMLEndpointURL", - "inputRequestName": "Inputs", - "inputs": [{"dataType": "array", "mapTo": 0, "name": "data"}], - "numberOfParallelRequests": 1, - "outputResponseName": "Results", - "outputs": [{"dataType": "string", "name": "Sentiment"}], - }, - "type": "Microsoft.MachineLearningServices", - }, - "inputs": [{"dataType": "nvarchar(max)"}], - "output": {"dataType": "nvarchar(max)"}, - }, - "type": "Scalar", - } - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Create_AzureMLService.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_event_grid_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_event_grid_input.py deleted file mode 100644 index 1c306f610ea1..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_event_grid_input.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python create_an_event_grid_input.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.inputs.create_or_replace( - resource_group_name="sjrg3467", - job_name="sj9742", - input_name="input7970", - input={ - "properties": { - "datasource": { - "properties": { - "eventTypes": ["Microsoft.Storage.BlobCreated"], - "schema": "CloudEventSchema", - "storageAccounts": [ - {"accountKey": "myaccountkey", "accountName": "myaccount", "authenticationMode": "Msi"} - ], - "subscriber": { - "properties": { - "authenticationMode": "Msi", - "consumerGroupName": "sdkconsumergroup", - "eventHubName": "sdkeventhub", - "partitionCount": 16, - "serviceBusNamespace": "sdktest", - "sharedAccessPolicyKey": "someSharedAccessPolicyKey==", - "sharedAccessPolicyName": "RootManageSharedAccessKey", - }, - "type": "Microsoft.EventHub/EventHub", - }, - }, - "type": "Microsoft.EventGrid/EventSubscriptions", - }, - "type": "Stream", - } - }, - ) - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_EventGrid.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_ml_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_azure_ml.py similarity index 94% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_ml_function.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_azure_ml.py index 86d839ae815f..d4cb51a20b60 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_ml_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_azure_ml.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_an_azure_ml_function.py + python function_create_azure_ml.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -59,6 +59,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Create_AzureML.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Create_AzureML.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_java_script_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_java_script.py similarity index 93% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_java_script_function.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_java_script.py index ccc9fff7e9e8..ba7bb2bda1c1 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_java_script_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_create_java_script.py @@ -14,7 +14,7 @@ pip install azure-identity pip 
install azure-mgmt-streamanalytics # USAGE - python create_a_java_script_function.py + python function_create_java_script.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -50,6 +50,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Create_JavaScript.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Create_JavaScript.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_delete.py similarity index 88% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_function.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_delete.py index fd2364d9245f..8ef9c4b40288 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_delete.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python delete_a_function.py + python function_delete.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -29,14 +29,13 @@ def main(): subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", ) - response = client.functions.delete( + client.functions.delete( resource_group_name="sjrg1637", job_name="sj8653", function_name="function8197", ) - print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Delete.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_ml_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_azure_ml.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_ml_function.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_azure_ml.py index e6f3735014ff..bcec4c1ea93a 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_ml_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_azure_ml.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_an_azure_ml_function.py + python function_get_azure_ml.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Get_AzureML.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Get_AzureML.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_java_script_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_java_script.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_java_script_function.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_java_script.py index 57eba9e4463a..ee8e0653ba0c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_java_script_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_get_java_script.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_java_script_function.py + python function_get_java_script.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Get_JavaScript.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Get_JavaScript.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_functions_in_a_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_list_by_streaming_job.py similarity index 90% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_functions_in_a_streaming_job.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_list_by_streaming_job.py index 743e223f057d..573d278426a5 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_functions_in_a_streaming_job.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_list_by_streaming_job.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python list_all_functions_in_a_streaming_job.py + python function_list_by_streaming_job.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_ListByStreamingJob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_ListByStreamingJob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/retrieve_the_default_definition_for_an_azure_ml_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_retrieve_default_definition_azure_ml.py similarity index 89% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/retrieve_the_default_definition_for_an_azure_ml_function.py rename to 
sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_retrieve_default_definition_azure_ml.py index d41b5e7b8475..ea6f76c8ebb7 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/retrieve_the_default_definition_for_an_azure_ml_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_retrieve_default_definition_azure_ml.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python retrieve_the_default_definition_for_an_azure_ml_function.py + python function_retrieve_default_definition_azure_ml.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_RetrieveDefaultDefinition_AzureML.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_RetrieveDefaultDefinition_AzureML.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_an_azure_ml_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_azure_ml.py similarity index 90% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_an_azure_ml_function.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_azure_ml.py index 979207b6c002..13393faf2d5f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_an_azure_ml_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_azure_ml.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python test_the_connection_for_an_azure_ml_function.py + python function_test_azure_ml.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Test_AzureML.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Test_AzureML.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_a_java_script_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_java_script.py similarity index 90% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_a_java_script_function.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_java_script.py index 4ae07bbf7c06..bcb07e60b6c6 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_a_java_script_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_test_java_script.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python 
test_the_connection_for_a_java_script_function.py + python function_test_java_script.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Test_JavaScript.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Test_JavaScript.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_ml_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_azure_ml.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_ml_function.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_azure_ml.py index e4374257148a..5c536f5acbd6 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_ml_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_azure_ml.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_an_azure_ml_function.py + python function_update_azure_ml.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -45,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Update_AzureML.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Update_AzureML.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_java_script_function.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_java_script.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_java_script_function.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_java_script.py index edd7d761de10..ff7614ef3d9a 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_java_script_function.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/function_update_java_script.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_a_java_script_function.py + python function_update_java_script.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -48,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Function_Update_JavaScript.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Function_Update_JavaScript.json if __name__ == "__main__": main() diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_gateway_message_bus_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_gateway_message_bus.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_gateway_message_bus_input.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_gateway_message_bus.py index 40fa1dffa6b3..5ee43f4db447 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_gateway_message_bus_input.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_gateway_message_bus.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_gateway_message_bus_input.py + python input_create_gateway_message_bus.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -43,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_GatewayMessageBus.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_GatewayMessageBus.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_reference_blob_input_with_csv_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_blob_csv.py similarity index 82% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_reference_blob_input_with_csv_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_blob_csv.py index 35f850f23b37..9e68a8678ca9 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_reference_blob_input_with_csv_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_blob_csv.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_reference_blob_input_with_csv_serialization.py + python input_create_reference_blob_csv.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,14 +37,9 @@ def main(): "properties": { "datasource": { "properties": { - "blobName": "myblobinput", "container": "state", "dateFormat": "yyyy/MM/dd", - "deltaPathPattern": "/testBlob/{date}/delta/{time}/", - "deltaSnapshotRefreshRate": "16:14:30", - "fullSnapshotRefreshRate": "16:14:30", "pathPattern": "{date}/{time}", - "sourcePartitionCount": 16, "storageAccounts": [{"accountKey": "someAccountKey==", "accountName": "someAccountName"}], "timeFormat": "HH", }, @@ -58,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_Reference_Blob_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_Reference_Blob_CSV.json if __name__ == "__main__": main() diff --git 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_reference_file_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_file.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_reference_file_input.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_file.py index 2e66f1873460..bf4bd0a82fd2 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_reference_file_input.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_reference_file.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_reference_file_input.py + python input_create_reference_file.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_Reference_File.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_Reference_File.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_stream_blob_input_with_csv_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_blob_csv.py similarity index 93% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_stream_blob_input_with_csv_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_blob_csv.py index 36d802201203..cd0f007e545f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_stream_blob_input_with_csv_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_blob_csv.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_stream_blob_input_with_csv_serialization.py + python input_create_stream_blob_csv.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -54,6 +54,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_Stream_Blob_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_Stream_Blob_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_stream_event_hub_input_with_json_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_event_hub_json.py similarity index 89% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_stream_event_hub_input_with_json_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_event_hub_json.py index a01e5cea9f2a..f4c596327972 100644 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_stream_event_hub_input_with_json_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_event_hub_json.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_stream_event_hub_input_with_json_serialization.py + python input_create_stream_event_hub_json.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -47,13 +47,12 @@ def main(): }, "serialization": {"properties": {"encoding": "UTF8"}, "type": "Json"}, "type": "Stream", - "watermarkSettings": {"watermarkMode": "ReadWatermark"}, } }, ) print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_Stream_EventHub_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_Stream_EventHub_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_stream_io_t_hub_input_with_avro_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_io_thub_avro.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_stream_io_t_hub_input_with_avro_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_io_thub_avro.py index 55d619523506..392f299e04d8 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_stream_io_t_hub_input_with_avro_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_create_stream_io_thub_avro.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_stream_io_t_hub_input_with_avro_serialization.py + python input_create_stream_io_thub_avro.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -53,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Create_Stream_IoTHub_Avro.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Create_Stream_IoTHub_Avro.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_an_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_delete.py similarity index 89% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_an_input.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_delete.py index a28403996bb5..83d9a59a4af0 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_an_input.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_delete.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python delete_an_input.py + python input_delete.py Before run the sample, please set the values 
of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -29,14 +29,13 @@ def main(): subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", ) - response = client.inputs.delete( + client.inputs.delete( resource_group_name="sjrg8440", job_name="sj9597", input_name="input7225", ) - print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Delete.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_reference_blob_input_with_csv_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_reference_blob_csv.py similarity index 90% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_reference_blob_input_with_csv_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_reference_blob_csv.py index a2cf545cfbef..c8342250f237 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_reference_blob_input_with_csv_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_reference_blob_csv.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_reference_blob_input_with_csv_serialization.py + python input_get_reference_blob_csv.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Get_Reference_Blob_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Get_Reference_Blob_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_stream_blob_input_with_csv_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_blob_csv.py similarity index 90% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_stream_blob_input_with_csv_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_blob_csv.py index 2f0d5bffed27..d0cae535e386 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_stream_blob_input_with_csv_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_blob_csv.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_stream_blob_input_with_csv_serialization.py + python input_get_stream_blob_csv.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Get_Stream_Blob_CSV.json +# 
x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Get_Stream_Blob_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_stream_event_hub_input_with_json_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_event_hub_json.py similarity index 89% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_stream_event_hub_input_with_json_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_event_hub_json.py index 7548646bb308..03ed7ecf6a0f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_stream_event_hub_input_with_json_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_event_hub_json.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_stream_event_hub_input_with_json_serialization.py + python input_get_stream_event_hub_json.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Get_Stream_EventHub_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Get_Stream_EventHub_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_stream_io_t_hub_input_with_avro_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_io_thub_avro.py similarity index 89% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_stream_io_t_hub_input_with_avro_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_io_thub_avro.py index 538adaa60811..f306eaf31bcf 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_stream_io_t_hub_input_with_avro_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_get_stream_io_thub_avro.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_stream_io_t_hub_input_with_avro_serialization.py + python input_get_stream_io_thub_avro.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Get_Stream_IoTHub_Avro.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Get_Stream_IoTHub_Avro.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_inputs_in_a_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job.py similarity index 91% rename from 
sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_inputs_in_a_streaming_job.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job.py
index a093ca4df837..b975babeb0d4 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_inputs_in_a_streaming_job.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job.py
@@ -14,7 +14,7 @@
     pip install azure-identity
     pip install azure-mgmt-streamanalytics
 # USAGE
-    python list_all_inputs_in_a_streaming_job.py
+    python input_list_by_streaming_job.py
 
     Before run the sample, please set the values of the client ID, tenant ID and client secret
     of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -37,6 +37,6 @@ def main():
         print(item)
 
 
-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_ListByStreamingJob.json
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_ListByStreamingJob.json
 if __name__ == "__main__":
     main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_valid_sk_us_list_for_the_specified_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job_diagnostics.py
similarity index 87%
rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_valid_sk_us_list_for_the_specified_streaming_job.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job_diagnostics.py
index 985b689cc3d4..5c71a710a4e3 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_valid_sk_us_list_for_the_specified_streaming_job.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_list_by_streaming_job_diagnostics.py
@@ -14,7 +14,7 @@
     pip install azure-identity
     pip install azure-mgmt-streamanalytics
 # USAGE
-    python get_valid_sk_us_list_for_the_specified_streaming_job.py
+    python input_list_by_streaming_job_diagnostics.py
 
     Before run the sample, please set the values of the client ID, tenant ID and client secret
     of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -29,7 +29,7 @@ def main():
         subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
     )
 
-    response = client.sku.list(
+    response = client.inputs.list_by_streaming_job(
         resource_group_name="sjrg3276",
         job_name="sj7804",
     )
@@ -37,6 +37,6 @@ def main():
         print(item)
 
 
-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_GetSkus.json
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_ListByStreamingJob_Diagnostics.json
 if __name__ == "__main__":
     main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_an_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_test.py
similarity index 92%
rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_an_input.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_test.py
index 907fb3b7538f..14fdadb5dcc2 100644
---
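# ---- illustrative sketch (not part of the diff) ---------------------------
# The hunk above fixes a mis-generated sample: a file whose name promises
# input diagnostics previously called client.sku.list. A minimal sketch of
# the corrected call, assuming list_by_streaming_job exposes the $select
# OData parameter (named in Input_ListByStreamingJob_Diagnostics.json) as a
# `select` keyword; treat that keyword name as an assumption:
from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
)
for item in client.inputs.list_by_streaming_job(
    resource_group_name="sjrg3276",
    job_name="sj7804",
    select="*",  # "*" asks the service to include diagnostic information
):
    print(item)
# ----------------------------------------------------------------------------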
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_an_input.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_test.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python test_the_connection_for_an_input.py + python input_test.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Test.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Test.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_reference_blob_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_reference_blob.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_reference_blob_input.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_reference_blob.py index 99784daf08a2..06513230b4e7 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_reference_blob_input.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_reference_blob.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_a_reference_blob_input.py + python input_update_reference_blob.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -44,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Update_Reference_Blob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Update_Reference_Blob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_stream_blob_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_blob.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_stream_blob_input.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_blob.py index b52c0e033f75..fc028e636722 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_stream_blob_input.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_blob.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_a_stream_blob_input.py + python input_update_stream_blob.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -44,6 +44,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Update_Stream_Blob.json 
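# ---- illustrative sketch (not part of the diff) ---------------------------
# The Input_Update_* samples in this span all follow one pattern: a partial
# update through client.inputs.update. The hunks above elide the request
# bodies, so this sketch reuses the resource names from the Input_Delete
# sample and a single stable 2020-03-01 property; the `input` parameter name
# follows the generator's convention and should be confirmed against the
# installed package:
from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
)
response = client.inputs.update(
    resource_group_name="sjrg8440",
    job_name="sj9597",
    input_name="input7225",
    input={"properties": {"serialization": {"type": "Csv", "properties": {"fieldDelimiter": ","}}}},
)
print(response)
# ----------------------------------------------------------------------------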
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Update_Stream_Blob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_stream_event_hub_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_event_hub.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_stream_event_hub_input.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_event_hub.py index edd4c714f3fc..ae53be56da91 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_stream_event_hub_input.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_event_hub.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_a_stream_event_hub_input.py + python input_update_stream_event_hub.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -47,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Update_Stream_EventHub.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Update_Stream_EventHub.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_stream_io_t_hub_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_io_thub.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_stream_io_t_hub_input.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_io_thub.py index 16ab7f0b0b02..3ecb1d261b18 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_stream_io_t_hub_input.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/input_update_stream_io_thub.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_a_stream_io_t_hub_input.py + python input_update_stream_io_thub.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -47,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_Update_Stream_IoTHub.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Input_Update_Stream_IoTHub.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_inputs_in_a_streaming_job_and_include_diagnostic_information_using_the_$select_odata_query_parameter.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_inputs_in_a_streaming_job_and_include_diagnostic_information_using_the_$select_odata_query_parameter.py deleted file mode 100644 index 76a8cb59bd16..000000000000 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_inputs_in_a_streaming_job_and_include_diagnostic_information_using_the_$select_odata_query_parameter.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from azure.identity import DefaultAzureCredential
-from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
-
-"""
-# PREREQUISITES
-    pip install azure-identity
-    pip install azure-mgmt-streamanalytics
-# USAGE
-    python list_all_inputs_in_a_streaming_job_and_include_diagnostic_information_using_the_$select_odata_query_parameter.py
-
-    Before run the sample, please set the values of the client ID, tenant ID and client secret
-    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
-    AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
-    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
-"""
-
-
-def main():
-    client = StreamAnalyticsManagementClient(
-        credential=DefaultAzureCredential(),
-        subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
-    )
-
-    response = client.inputs.list_by_streaming_job(
-        resource_group_name="sjrg3276",
-        job_name="sj7804",
-    )
-    for item in response:
-        print(item)
-
-
-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Input_ListByStreamingJob_Diagnostics.json
-if __name__ == "__main__":
-    main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_available_operations_for_the_stream_analytics_resource_provider.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/operation_list.py
similarity index 89%
rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_available_operations_for_the_stream_analytics_resource_provider.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/operation_list.py
index 1ab860462da9..9d76d344bde0 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_available_operations_for_the_stream_analytics_resource_provider.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/operation_list.py
@@ -14,7 +14,7 @@
     pip install azure-identity
     pip install azure-mgmt-streamanalytics
 # USAGE
-    python list_available_operations_for_the_stream_analytics_resource_provider.py
+    python operation_list.py
 
     Before run the sample, please set the values of the client ID, tenant ID and client secret
     of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -34,6 +34,6 @@ def main():
         print(item)
 
 
-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Operation_List.json
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Operation_List.json
 if __name__ == "__main__":
     main()
diff --git
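# ---- illustrative sketch (not part of the diff) ---------------------------
# Every x-ms-original-file comment in this regeneration now maps to the
# stable/2020-03-01 examples rather than preview/2021-10-01-preview. If you
# want the client to be explicit about the service version it targets, the
# usual generated-client override is an api_version keyword; treat that
# keyword as an SDK convention and confirm it against the installed package:
from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
    api_version="2020-03-01",
)
# ----------------------------------------------------------------------------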
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_data_lake_store_output_with_json_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_data_lake_store_json.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_data_lake_store_output_with_json_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_data_lake_store_json.py index f66a4838ad40..7dc36d84af29 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_data_lake_store_output_with_json_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_data_lake_store_json.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_an_azure_data_lake_store_output_with_json_serialization.py + python output_create_azure_data_lake_store_json.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -55,6 +55,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_AzureDataLakeStore_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_AzureDataLakeStore_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_function_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_function.py similarity index 93% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_function_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_function.py index 57f0dfe92c98..5c69c6d0ce46 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_function_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_function.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_an_azure_function_output.py + python output_create_azure_function.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -51,6 +51,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_AzureFunction.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_AzureFunction.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_sql_database_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_sql.py similarity index 93% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_sql_database_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_sql.py index 
bf30d7fbbb0c..cf833b82a09a 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_sql_database_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_sql.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_an_azure_sql_database_output.py + python output_create_azure_sql.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -51,6 +51,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_AzureSQL.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_AzureSQL.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_table_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_table.py similarity index 93% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_table_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_table.py index 65101cce48f9..b51884393108 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_table_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_azure_table.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_an_azure_table_output.py + python output_create_azure_table.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -53,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_AzureTable.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_AzureTable.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_blob_output_with_csv_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_blob_csv.py similarity index 89% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_blob_output_with_csv_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_blob_csv.py index d99cbbd3e481..b5e0c1e41637 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_blob_output_with_csv_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_blob_csv.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_blob_output_with_csv_serialization.py + python output_create_blob_csv.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,8 +37,6 @@ def main(): "properties": { "datasource": { 
"properties": { - "blobPathPrefix": "my/path", - "blobWriteMode": "Once", "container": "state", "dateFormat": "yyyy/MM/dd", "pathPattern": "{date}/{time}", @@ -54,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_Blob_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_Blob_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_data_warehouse_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_data_warehouse.py similarity index 90% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_data_warehouse_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_data_warehouse.py index cb8697de1e1c..36391cff8382 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_azure_data_warehouse_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_data_warehouse.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_an_azure_data_warehouse_output.py + python output_create_data_warehouse.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,7 +37,6 @@ def main(): "properties": { "datasource": { "properties": { - "authenticationMode": "Msi", "database": "zhayaSQLpool", "password": "password123", "server": "asatestserver", @@ -52,6 +51,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_DataWarehouse.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_DataWarehouse.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_document_db_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_document_db.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_document_db_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_document_db.py index f10faca03cb2..9cc92d5c0f63 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_document_db_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_document_db.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_document_db_output.py + python output_create_document_db.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -39,7 +39,6 @@ def main(): "properties": { "accountId": "someAccountId", "accountKey": "accountKey==", - "authenticationMode": "Msi", "collectionNamePattern": "collection", "database": "db01", "documentId": "documentId", @@ -53,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_DocumentDB.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_DocumentDB.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_event_hub_output_with_json_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_event_hub_json.py similarity index 85% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_event_hub_output_with_json_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_event_hub_json.py index 94e7738d29a2..ebe776fd36dc 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_an_event_hub_output_with_json_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_event_hub_json.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_an_event_hub_output_with_json_serialization.py + python output_create_event_hub_json.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -46,16 +46,12 @@ def main(): "type": "Microsoft.ServiceBus/EventHub", }, "serialization": {"properties": {"encoding": "UTF8", "format": "Array"}, "type": "Json"}, - "watermarkSettings": { - "maxWatermarkDifferenceAcrossPartitions": "16:14:30", - "watermarkMode": "SendCurrentPartitionWatermark", - }, } }, ) print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_EventHub_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_EventHub_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_gateway_message_bus_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_gateway_message_bus.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_gateway_message_bus_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_gateway_message_bus.py index d975fb019832..88bd57e3ddac 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_gateway_message_bus_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_gateway_message_bus.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_gateway_message_bus_output.py + python output_create_gateway_message_bus.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_GatewayMessageBus.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_GatewayMessageBus.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_power_bi_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_power_bi.py similarity index 94% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_power_bi_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_power_bi.py index 637d737f7d86..e54f9f7c86a8 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_power_bi_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_power_bi.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_power_bi_output.py + python output_create_power_bi.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -53,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_PowerBI.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_PowerBI.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_service_bus_queue_output_with_avro_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_queue_avro.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_service_bus_queue_output_with_avro_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_queue_avro.py index c87b67f647cc..f04fa95d2d5b 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_service_bus_queue_output_with_avro_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_queue_avro.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_service_bus_queue_output_with_avro_serialization.py + python output_create_service_bus_queue_avro.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -53,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_ServiceBusQueue_Avro.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_ServiceBusQueue_Avro.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_service_bus_topic_output_with_csv_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_topic_csv.py similarity index 92% rename from 
sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_service_bus_topic_output_with_csv_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_topic_csv.py index 4639026b4158..4658b9d940aa 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_service_bus_topic_output_with_csv_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_create_service_bus_topic_csv.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_service_bus_topic_output_with_csv_serialization.py + python output_create_service_bus_topic_csv.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -52,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Create_ServiceBusTopic_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Create_ServiceBusTopic_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_an_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_delete.py similarity index 89% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_an_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_delete.py index 4f03b3fb8408..ab395b8def25 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_an_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_delete.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python delete_an_output.py + python output_delete.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -29,14 +29,13 @@ def main(): subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", ) - response = client.outputs.delete( + client.outputs.delete( resource_group_name="sjrg2157", job_name="sj6458", output_name="output1755", ) - print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Delete.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_event_hub_output_with_json_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_data_lake_store_json.py similarity index 90% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_event_hub_output_with_json_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_data_lake_store_json.py index 9185b6f3babe..ccdda10df982 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_event_hub_output_with_json_serialization.py +++ 
b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_data_lake_store_json.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_an_event_hub_output_with_json_serialization.py + python output_get_azure_data_lake_store_json.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_EventHub_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_AzureDataLakeStore_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_function_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_function.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_function_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_function.py index edb03f6af2d9..bd8dbc95ad78 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_function_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_function.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_an_azure_function_output.py + python output_get_azure_function.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_AzureFunction.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_AzureFunction.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_sql_database_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_sql.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_sql_database_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_sql.py index af10854c17f0..ebd4aaa42a40 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_sql_database_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_sql.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_an_azure_sql_database_output.py + python output_get_azure_sql.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_AzureSQL.json +# 
x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_AzureSQL.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_table_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_table.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_table_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_table.py index f7894b844c28..5eeec0014c3f 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_table_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_azure_table.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_an_azure_table_output.py + python output_get_azure_table.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_AzureTable.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_AzureTable.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_blob_output_with_csv_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_blob_csv.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_blob_output_with_csv_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_blob_csv.py index 8d88d94daf45..dac16813595c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_blob_output_with_csv_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_blob_csv.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_blob_output_with_csv_serialization.py + python output_get_blob_csv.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_Blob_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_Blob_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_data_warehouse_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_data_warehouse.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_data_warehouse_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_data_warehouse.py index 2a5c6de6c1d3..0639fbd48c88 100644 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_data_warehouse_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_data_warehouse.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_an_azure_data_warehouse_output.py + python output_get_data_warehouse.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_DataWarehouse.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_DataWarehouse.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_power_bi_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_document_db.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_power_bi_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_document_db.py index d8650e010fa3..82212dd766de 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_power_bi_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_document_db.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_power_bi_output.py + python output_get_document_db.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_PowerBI.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_DocumentDB.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_data_lake_store_output_with_json_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_event_hub_json.py similarity index 89% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_data_lake_store_output_with_json_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_event_hub_json.py index 77f6a351e283..e10801719d15 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_an_azure_data_lake_store_output_with_json_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_event_hub_json.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_an_azure_data_lake_store_output_with_json_serialization.py + python output_get_event_hub_json.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_AzureDataLakeStore_JSON.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_EventHub_JSON.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_document_db_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_power_bi.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_document_db_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_power_bi.py index b52b6f777913..79e86369fb33 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_document_db_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_power_bi.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_document_db_output.py + python output_get_power_bi.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_DocumentDB.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_PowerBI.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_service_bus_queue_output_with_avro_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_queue_avro.py similarity index 89% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_service_bus_queue_output_with_avro_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_queue_avro.py index 5e2b6607c6ca..2595ccef6474 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_service_bus_queue_output_with_avro_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_queue_avro.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_service_bus_queue_output_with_avro_serialization.py + python output_get_service_bus_queue_avro.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_ServiceBusQueue_Avro.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_ServiceBusQueue_Avro.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_service_bus_topic_output_with_csv_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_topic_csv.py similarity index 89% rename from 
sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_service_bus_topic_output_with_csv_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_topic_csv.py index c54c8a97d072..f0915b98d718 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_service_bus_topic_output_with_csv_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_get_service_bus_topic_csv.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_service_bus_topic_output_with_csv_serialization.py + python output_get_service_bus_topic_csv.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Get_ServiceBusTopic_CSV.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Get_ServiceBusTopic_CSV.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_outputs_in_a_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_list_by_streaming_job.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_outputs_in_a_streaming_job.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_list_by_streaming_job.py index 17d408160bfd..90c03389a96a 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_outputs_in_a_streaming_job.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_list_by_streaming_job.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python list_all_outputs_in_a_streaming_job.py + python output_list_by_streaming_job.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_ListByStreamingJob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_ListByStreamingJob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_an_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_test.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_an_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_test.py index 853203e148db..5dfa21f7b44e 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_connection_for_an_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_test.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python test_the_connection_for_an_output.py + 
python output_test.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Test.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Test.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_data_lake_store_output_with_json_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_data_lake_store.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_data_lake_store_output_with_json_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_data_lake_store.py index 39d7f20b8445..0148bbfe1978 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_data_lake_store_output_with_json_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_data_lake_store.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_an_azure_data_lake_store_output_with_json_serialization.py + python output_update_azure_data_lake_store.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -46,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_AzureDataLakeStore.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_AzureDataLakeStore.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_function_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_function.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_function_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_function.py index 4dd892099c3f..bce29891646c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_function_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_function.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_an_azure_function_output.py + python output_update_azure_function.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -45,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_AzureFunction.json +# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_AzureFunction.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_sql_database_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_sql.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_sql_database_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_sql.py index 152530aabc4c..1230034f44a0 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_sql_database_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_sql.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_an_azure_sql_database_output.py + python output_update_azure_sql.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -42,6 +42,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_AzureSQL.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_AzureSQL.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_table_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_table.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_table_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_table.py index 34dd63ceb8e3..d811575dd73c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_table_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_azure_table.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_an_azure_table_output.py + python output_update_azure_table.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -45,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_AzureTable.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_AzureTable.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_blob_output_with_csv_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_blob.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_blob_output_with_csv_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_blob.py index 0d16255aa993..e0be95e6157c 100644 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_blob_output_with_csv_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_blob.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_a_blob_output_with_csv_serialization.py + python output_update_blob.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -43,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_Blob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_Blob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_data_warehouse_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_data_warehouse.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_data_warehouse_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_data_warehouse.py index c327a821f612..4b56fe89fdeb 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_azure_data_warehouse_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_data_warehouse.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_an_azure_data_warehouse_output.py + python output_update_data_warehouse.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -42,6 +42,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_DataWarehouse.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_DataWarehouse.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_document_db_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_document_db.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_document_db_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_document_db.py index 01b6ad91b5f6..eb0cb6d89bf6 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_document_db_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_document_db.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_a_document_db_output.py + python output_update_document_db.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -45,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: 
specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_DocumentDB.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_DocumentDB.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_event_hub_output_with_json_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_event_hub.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_event_hub_output_with_json_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_event_hub.py index 2dcdebdafffe..e828061c9647 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_an_event_hub_output_with_json_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_event_hub.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_an_event_hub_output_with_json_serialization.py + python output_update_event_hub.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -46,6 +46,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_EventHub.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_EventHub.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_power_bi_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_power_bi.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_power_bi_output.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_power_bi.py index a5b0fdb3cf0d..b899a85e0a6c 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_power_bi_output.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_power_bi.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_a_power_bi_output.py + python output_update_power_bi.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_PowerBI.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_PowerBI.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_service_bus_queue_output_with_avro_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_queue.py similarity index 91% rename from 
sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_service_bus_queue_output_with_avro_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_queue.py index c32af017b8ae..695f540025e8 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_service_bus_queue_output_with_avro_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_queue.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_a_service_bus_queue_output_with_avro_serialization.py + python output_update_service_bus_queue.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -43,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_ServiceBusQueue.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_ServiceBusQueue.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_service_bus_topic_output_with_csv_serialization.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_topic.py similarity index 91% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_service_bus_topic_output_with_csv_serialization.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_topic.py index d6a4fa6fb1a5..8714136fa5f5 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_service_bus_topic_output_with_csv_serialization.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/output_update_service_bus_topic.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python update_a_service_bus_topic_output_with_csv_serialization.py + python output_update_service_bus_topic.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -43,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Output_Update_ServiceBusTopic.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Output_Update_ServiceBusTopic.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_private_endpoint.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_create.py similarity index 93% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_private_endpoint.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_create.py index 17ada055007b..7faabda3ca30 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_private_endpoint.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_create.py @@ -14,7 +14,7 @@ 
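Every output_get_*/output_update_* rename above wraps the same call shape; only the nested datasource/serialization payload differs per sink. A minimal sketch of that shared shape, assuming placeholder resource names ("sjrg", "sj", "output" are illustrative, not values from this diff; the subscription ID is the one the private endpoint samples below use):

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="34adfa4f-cedf-4dc0-ba29-b6d1a69ab345",  # placeholder subscription
)

# outputs.get returns the same Output model for every sink type (Blob, SQL,
# Event Hub, Power BI, ...); the renamed samples differ only in which resource
# they fetch and which datasource payload it carries.
response = client.outputs.get(
    resource_group_name="sjrg",  # placeholder
    job_name="sj",  # placeholder
    output_name="output",  # placeholder
)
print(response)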
pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_private_endpoint.py + python private_endpoint_create.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -49,6 +49,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/PrivateEndpoint_Create.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/PrivateEndpoint_Create.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_private_endpoint.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_delete.py similarity index 87% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_private_endpoint.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_delete.py index 5e22de44c16a..357a5ada9156 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_private_endpoint.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_delete.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python delete_a_private_endpoint.py + python private_endpoint_delete.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -29,14 +29,13 @@ def main(): subscription_id="34adfa4f-cedf-4dc0-ba29-b6d1a69ab345", ) - response = client.private_endpoints.begin_delete( + client.private_endpoints.begin_delete( resource_group_name="sjrg", cluster_name="testcluster", private_endpoint_name="testpe", ).result() - print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/PrivateEndpoint_Delete.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/PrivateEndpoint_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_private_endpoint.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_get.py similarity index 92% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_private_endpoint.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_get.py index 96b53ab51ba7..e8e8f5cdabda 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_private_endpoint.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_get.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_private_endpoint.py + python private_endpoint_get.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/PrivateEndpoint_Get.json 
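The private_endpoint_delete hunk above drops the "response =" assignment and the print(response) because begin_delete is a long-running operation whose final result is None. A short sketch of the pattern, reusing the client and resource names from that sample:

# begin_delete returns an LROPoller[None]; result() blocks until the service
# finishes the deletion and then yields None, so there is nothing to print.
poller = client.private_endpoints.begin_delete(
    resource_group_name="sjrg",
    cluster_name="testcluster",
    private_endpoint_name="testpe",
)
poller.result()  # returns None on success; raises HttpResponseError on failure

The same LROPoller[None] pattern explains the streaming_job_delete.py and streaming_job_scale.py hunks further down, which drop their print(response) lines for the same reason.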
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/PrivateEndpoint_Get.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_the_private_endpoints_in_a_cluster.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_list_by_cluster.py similarity index 90% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_the_private_endpoints_in_a_cluster.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_list_by_cluster.py index aee4768dabf1..7a99da0832a1 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_the_private_endpoints_in_a_cluster.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/private_endpoint_list_by_cluster.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_the_private_endpoints_in_a_cluster.py + python private_endpoint_list_by_cluster.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2020-03-01-preview/examples/PrivateEndpoint_ListByCluster.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/PrivateEndpoint_ListByCluster.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/sample_the_stream_analytics_input_data.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/sample_the_stream_analytics_input_data.py deleted file mode 100644 index b61728102863..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/sample_the_stream_analytics_input_data.py +++ /dev/null @@ -1,62 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python sample_the_stream_analytics_input_data.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.subscriptions.begin_sample_input( - location="West US", - sample_input={ - "compatibilityLevel": "1.2", - "dataLocale": "en-US", - "eventsUri": "http://myoutput.com", - "input": { - "properties": { - "datasource": { - "properties": { - "container": "state", - "dateFormat": "yyyy/MM/dd", - "pathPattern": "{date}/{time}", - "sourcePartitionCount": 16, - "storageAccounts": [{"accountKey": "someAccountKey==", "accountName": "someAccountName"}], - "timeFormat": "HH", - }, - "type": "Microsoft.Storage/Blob", - }, - "serialization": {"properties": {"encoding": "UTF8", "fieldDelimiter": ","}, "type": "Csv"}, - "type": "Stream", - } - }, - }, - ).result() - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_SampleInput.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/start_a_streaming_job_with_job_start_time_output_start_mode.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/start_a_streaming_job_with_job_start_time_output_start_mode.py deleted file mode 100644 index d2458076cacb..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/start_a_streaming_job_with_job_start_time_output_start_mode.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python start_a_streaming_job_with_job_start_time_output_start_mode.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
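The deleted subscriptions.begin_sample_input sample above (a preview-only operation with no counterpart in the stable 2020-03-01 surface) built its blob input from raw dicts. The same payload can be expressed with the package's typed models; a hedged sketch against inputs.create_or_replace, where the model names come from the released SDK and "sjrg", "sj", "input1" are illustrative placeholders rather than values from this diff:

from azure.mgmt.streamanalytics.models import (
    BlobStreamInputDataSource,
    CsvSerialization,
    Input,
    StorageAccount,
    StreamInputProperties,
)

blob_input = Input(
    properties=StreamInputProperties(  # the "Stream" input type from the deleted dict
        datasource=BlobStreamInputDataSource(
            storage_accounts=[StorageAccount(account_name="someAccountName", account_key="someAccountKey==")],
            container="state",
            path_pattern="{date}/{time}",
            date_format="yyyy/MM/dd",
            time_format="HH",
            source_partition_count=16,
        ),
        serialization=CsvSerialization(field_delimiter=",", encoding="UTF8"),
    )
)
response = client.inputs.create_or_replace(
    resource_group_name="sjrg",  # placeholder
    job_name="sj",  # placeholder
    input_name="input1",  # placeholder
    input=blob_input,
)
print(response)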
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.streaming_jobs.begin_start( - resource_group_name="sjrg6936", - job_name="sj59", - ).result() - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Start_JobStartTime.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/start_a_streaming_job_with_last_output_event_time_output_start_mode.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/start_a_streaming_job_with_last_output_event_time_output_start_mode.py deleted file mode 100644 index 16a30a069cae..000000000000 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/start_a_streaming_job_with_last_output_event_time_output_start_mode.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential -from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-streamanalytics -# USAGE - python start_a_streaming_job_with_last_output_event_time_output_start_mode.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. 
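The two start_a_streaming_job_* samples being deleted here (re-added below as streaming_job_start_*.py) call begin_start with no request body even though their names promise different output start modes. The mode would normally travel in an optional StartStreamingJobParameters payload; a hedged sketch, where the start_job_parameters keyword is my reading of the released SDK, not something shown in this diff:

from azure.mgmt.streamanalytics.models import StartStreamingJobParameters

client.streaming_jobs.begin_start(
    resource_group_name="sjrg6936",
    job_name="sj59",
    start_job_parameters=StartStreamingJobParameters(
        output_start_mode="JobStartTime",  # or "CustomTime" / "LastOutputEventTime"
    ),
).result()  # LROPoller[None]: nothing to print, matching the edited samples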
For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = StreamAnalyticsManagementClient( - credential=DefaultAzureCredential(), - subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", - ) - - response = client.streaming_jobs.begin_start( - resource_group_name="sjrg6936", - job_name="sj59", - ).result() - print(response) - - -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Start_LastOutputEventTime.json -if __name__ == "__main__": - main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_complete_streaming_job_(a_streaming_job_with_a_transformation,_at_least_1_input_and_at_least_1_output).py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_complete_job.py similarity index 82% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_complete_streaming_job_(a_streaming_job_with_a_transformation,_at_least_1_input_and_at_least_1_output).py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_complete_job.py index f9306d7465ac..8f261545ba2e 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_complete_streaming_job_(a_streaming_job_with_a_transformation,_at_least_1_input_and_at_least_1_output).py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_complete_job.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_complete_streaming_job_(a_streaming_job_with_a_transformation,_at_least_1_input_and_at_least_1_output).py + python streaming_job_create_complete_job.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -40,18 +40,6 @@ def main(): "eventsLateArrivalMaxDelayInSeconds": 5, "eventsOutOfOrderMaxDelayInSeconds": 0, "eventsOutOfOrderPolicy": "Drop", - "externals": { - "container": "mycontainer", - "path": "UserCustomCode.zip", - "refreshConfiguration": { - "dateFormat": "yyyy-dd-MM", - "pathPattern": "{date}\\\\{time}", - "refreshInterval": "00:01:00", - "refreshType": "Nonblocking", - "timeFormat": "HH", - }, - "storageAccount": {"accountKey": "mykey", "accountName": "mystorageaccount"}, - }, "functions": [], "inputs": [ { @@ -102,6 +90,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Create_CompleteJob.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Create_CompleteJob.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_streaming_job_shell_(a_streaming_job_with_no_inputs,_outputs,_transformation,_or_functions).py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_job_shell.py similarity index 90% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_streaming_job_shell_(a_streaming_job_with_no_inputs,_outputs,_transformation,_or_functions).py rename to 
sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_job_shell.py index 7229c539038c..82201798d5b9 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_streaming_job_shell_(a_streaming_job_with_no_inputs,_outputs,_transformation,_or_functions).py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_create_job_shell.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python create_a_streaming_job_shell_(a_streaming_job_with_no_inputs,_outputs,_transformation,_or_functions).py + python streaming_job_create_job_shell.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -52,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Create_JobShell.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Create_JobShell.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/stop_a_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_delete.py similarity index 87% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/stop_a_streaming_job.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_delete.py index 08a44d99c853..5ec993c2230a 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/stop_a_streaming_job.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_delete.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python stop_a_streaming_job.py + python streaming_job_delete.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -29,13 +29,12 @@ def main(): subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d", ) - response = client.streaming_jobs.begin_stop( + client.streaming_jobs.begin_delete( resource_group_name="sjrg6936", job_name="sj59", ).result() - print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Stop.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Delete.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_streaming_job_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_expand.py similarity index 86% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_streaming_job_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_expand.py index 8c4bdce66de0..e2b636d15040 100644 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_streaming_job_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_expand.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_streaming_job_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py + python streaming_job_get_expand.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Get_Expand.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Get_Expand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_streaming_job_and_do_not_use_the_$expand_odata_query_parameter.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_no_expand.py similarity index 89% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_streaming_job_and_do_not_use_the_$expand_odata_query_parameter.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_no_expand.py index f92727b524f8..cbb1ba7eb456 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_streaming_job_and_do_not_use_the_$expand_odata_query_parameter.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_get_no_expand.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python get_a_streaming_job_and_do_not_use_the_$expand_odata_query_parameter.py + python streaming_job_get_no_expand.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -36,6 +36,6 @@ def main(): print(response) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Get_NoExpand.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Get_NoExpand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_resource_group_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_expand.py similarity index 84% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_resource_group_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_expand.py index d2081857bec2..654e1af6081c 100644 --- 
a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_resource_group_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_expand.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python list_all_streaming_jobs_in_a_resource_group_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py + python streaming_job_list_by_resource_group_expand.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -36,6 +36,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_List_ByResourceGroup_Expand.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_List_ByResourceGroup_Expand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_resource_group_and_do_not_use_the_$expand_odata_query_parameter.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_no_expand.py similarity index 87% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_resource_group_and_do_not_use_the_$expand_odata_query_parameter.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_no_expand.py index 6cad46a7d139..6a8f6903c39e 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_resource_group_and_do_not_use_the_$expand_odata_query_parameter.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_resource_group_no_expand.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python list_all_streaming_jobs_in_a_resource_group_and_do_not_use_the_$expand_odata_query_parameter.py + python streaming_job_list_by_resource_group_no_expand.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -36,6 +36,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_List_ByResourceGroup_NoExpand.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_List_ByResourceGroup_NoExpand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_subscription_and_do_not_use_the_$expand_odata_query_parameter.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_expand.py similarity index 86% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_subscription_and_do_not_use_the_$expand_odata_query_parameter.py rename to 
sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_expand.py index 3d2b24c7a8bc..e557c1b8ce48 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_subscription_and_do_not_use_the_$expand_odata_query_parameter.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_expand.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python list_all_streaming_jobs_in_a_subscription_and_do_not_use_the_$expand_odata_query_parameter.py + python streaming_job_list_by_subscription_expand.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -34,6 +34,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_List_BySubscription_NoExpand.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_List_BySubscription_Expand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_subscription_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_no_expand.py similarity index 84% rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_subscription_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_no_expand.py index 126ac46f0b49..6f56ebfa1a95 100644 --- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_all_streaming_jobs_in_a_subscription_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py +++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_list_by_subscription_no_expand.py @@ -14,7 +14,7 @@ pip install azure-identity pip install azure-mgmt-streamanalytics # USAGE - python list_all_streaming_jobs_in_a_subscription_and_use_the_$expand_odata_query_parameter_to_expand_inputs,_outputs,_transformation,_and_functions.py + python streaming_job_list_by_subscription_no_expand.py Before run the sample, please set the values of the client ID, tenant ID and client secret of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, @@ -34,6 +34,6 @@ def main(): print(item) -# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_List_BySubscription_Expand.json +# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_List_BySubscription_NoExpand.json if __name__ == "__main__": main() diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/scale_a_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_scale.py similarity index 87% rename from 
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/scale_a_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_scale.py
similarity index 87%
rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/scale_a_streaming_job.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_scale.py
index 6bc2f091abba..6f9b84c0781c 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/scale_a_streaming_job.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_scale.py
@@ -14,7 +14,7 @@
     pip install azure-identity
     pip install azure-mgmt-streamanalytics
 # USAGE
-    python scale_a_streaming_job.py
+    python streaming_job_scale.py

     Before run the sample, please set the values of the client ID, tenant ID and client secret
     of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -29,13 +29,12 @@ def main():
         subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
     )

-    response = client.streaming_jobs.begin_scale(
+    client.streaming_jobs.begin_scale(
         resource_group_name="sjrg6936",
         job_name="sj59",
     ).result()
-    print(response)


-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Scale.json
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Scale.json
 if __name__ == "__main__":
     main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_custom_time.py
similarity index 87%
rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_streaming_job.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_custom_time.py
index efbfe2b1f929..c15ef4de3788 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/delete_a_streaming_job.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_custom_time.py
@@ -14,7 +14,7 @@
     pip install azure-identity
     pip install azure-mgmt-streamanalytics
 # USAGE
-    python delete_a_streaming_job.py
+    python streaming_job_start_custom_time.py

     Before run the sample, please set the values of the client ID, tenant ID and client secret
     of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -29,13 +29,12 @@ def main():
         subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
     )

-    response = client.streaming_jobs.begin_delete(
+    client.streaming_jobs.begin_start(
         resource_group_name="sjrg6936",
         job_name="sj59",
     ).result()
-    print(response)


-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Delete.json
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Start_CustomTime.json
 if __name__ == "__main__":
     main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/start_a_streaming_job_with_custom_time_output_start_mode.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_job_start_time.py
similarity index 85%
rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/start_a_streaming_job_with_custom_time_output_start_mode.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_job_start_time.py
index bf7038c20857..ba4cc9b5af64 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/start_a_streaming_job_with_custom_time_output_start_mode.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_job_start_time.py
@@ -14,7 +14,7 @@
     pip install azure-identity
     pip install azure-mgmt-streamanalytics
 # USAGE
-    python start_a_streaming_job_with_custom_time_output_start_mode.py
+    python streaming_job_start_job_start_time.py

     Before run the sample, please set the values of the client ID, tenant ID and client secret
     of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -29,13 +29,12 @@ def main():
         subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
     )

-    response = client.streaming_jobs.begin_start(
+    client.streaming_jobs.begin_start(
         resource_group_name="sjrg6936",
         job_name="sj59",
     ).result()
-    print(response)


-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Start_CustomTime.json
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Start_JobStartTime.json
 if __name__ == "__main__":
     main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_last_output_event_time.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_last_output_event_time.py
new file mode 100644
index 000000000000..3cb984fd82a7
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_start_last_output_event_time.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-streamanalytics
+# USAGE
+    python streaming_job_start_last_output_event_time.py
+
+    Before run the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = StreamAnalyticsManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
+    )
+
+    client.streaming_jobs.begin_start(
+        resource_group_name="sjrg6936",
+        job_name="sj59",
+    ).result()
+
+
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Start_LastOutputEventTime.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_stop.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_stop.py
new file mode 100644
index 000000000000..03deb14a95f1
--- /dev/null
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_stop.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-streamanalytics
+# USAGE
+    python streaming_job_stop.py
+
+    Before run the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = StreamAnalyticsManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
+    )
+
+    client.streaming_jobs.begin_stop(
+        resource_group_name="sjrg6936",
+        job_name="sj59",
+    ).result()
+
+
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Stop.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_streaming_job.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_update.py
similarity index 92%
rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_streaming_job.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_update.py
index de19df6b92d3..e577af22e3b5 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_streaming_job.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/streaming_job_update.py
@@ -14,7 +14,7 @@
     pip install azure-identity
     pip install azure-mgmt-streamanalytics
 # USAGE
-    python update_a_streaming_job.py
+    python streaming_job_update.py

     Before run the sample, please set the values of the client ID, tenant ID and client secret
     of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -39,6 +39,6 @@ def main():
     print(response)


-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/StreamingJob_Update.json
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/StreamingJob_Update.json
 if __name__ == "__main__":
     main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_subscription_quota_information_in_west_us.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_list_quotas.py
similarity index 90%
rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_subscription_quota_information_in_west_us.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_list_quotas.py
index 80b5e18a96cf..2f422b50e3e3 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/list_subscription_quota_information_in_west_us.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/subscription_list_quotas.py
@@ -14,7 +14,7 @@
     pip install azure-identity
     pip install azure-mgmt-streamanalytics
 # USAGE
-    python list_subscription_quota_information_in_west_us.py
+    python subscription_list_quotas.py

     Before run the sample, please set the values of the client ID, tenant ID and client secret
     of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -35,6 +35,6 @@ def main():
     print(response)


-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_ListQuotas.json
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Subscription_ListQuotas.json
 if __name__ == "__main__":
     main()
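The scale/start/stop samples above all drive long-running operations: each calls a begin_* method and blocks on .result(). As a minimal non-blocking sketch, reusing the samples' resource names and relying only on the azure.core.polling.LROPoller interface those begin_* methods return (status, done, wait):

from azure.identity import DefaultAzureCredential
from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient

client = StreamAnalyticsManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
)

# begin_stop returns an LROPoller rather than the final result, so the
# operation can be observed incrementally instead of blocking on .result().
poller = client.streaming_jobs.begin_stop(
    resource_group_name="sjrg6936",
    job_name="sj59",
)
while not poller.done():
    print("stop operation status:", poller.status())
    poller.wait(timeout=5)  # wait up to 5 seconds per polling slice
print("final status:", poller.status())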
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_stream_analytics_input.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_stream_analytics_input.py
deleted file mode 100644
index e3660bccee17..000000000000
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_stream_analytics_input.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from azure.identity import DefaultAzureCredential
-from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
-
-"""
-# PREREQUISITES
-    pip install azure-identity
-    pip install azure-mgmt-streamanalytics
-# USAGE
-    python test_the_stream_analytics_input.py
-
-    Before run the sample, please set the values of the client ID, tenant ID and client secret
-    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
-    AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
-    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
-"""
-
-
-def main():
-    client = StreamAnalyticsManagementClient(
-        credential=DefaultAzureCredential(),
-        subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
-    )
-
-    response = client.subscriptions.begin_test_input(
-        location="West US",
-        test_input={
-            "input": {
-                "properties": {
-                    "datasource": {
-                        "properties": {
-                            "container": "state",
-                            "dateFormat": "yyyy/MM/dd",
-                            "pathPattern": "{date}/{time}",
-                            "sourcePartitionCount": 16,
-                            "storageAccounts": [{"accountKey": "someAccountKey==", "accountName": "someAccountName"}],
-                            "timeFormat": "HH",
-                        },
-                        "type": "Microsoft.Storage/Blob",
-                    },
-                    "serialization": {"properties": {"encoding": "UTF8", "fieldDelimiter": ","}, "type": "Csv"},
-                    "type": "Stream",
-                }
-            }
-        },
-    ).result()
-    print(response)
-
-
-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_TestInput.json
-if __name__ == "__main__":
-    main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_stream_analytics_output.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_stream_analytics_output.py
deleted file mode 100644
index 8df1b8c7db47..000000000000
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_stream_analytics_output.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from azure.identity import DefaultAzureCredential
-from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
-
-"""
-# PREREQUISITES
-    pip install azure-identity
-    pip install azure-mgmt-streamanalytics
-# USAGE
-    python test_the_stream_analytics_output.py
-
-    Before run the sample, please set the values of the client ID, tenant ID and client secret
-    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
-    AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
-    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
-"""
-
-
-def main():
-    client = StreamAnalyticsManagementClient(
-        credential=DefaultAzureCredential(),
-        subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
-    )
-
-    response = client.subscriptions.begin_test_output(
-        location="West US",
-        test_output={
-            "output": {
-                "properties": {
-                    "datasource": {
-                        "properties": {
-                            "container": "state",
-                            "dateFormat": "yyyy/MM/dd",
-                            "pathPattern": "{date}/{time}",
-                            "storageAccounts": [{"accountKey": "accountKey==", "accountName": "someAccountName"}],
-                            "timeFormat": "HH",
-                        },
-                        "type": "Microsoft.Storage/Blob",
-                    },
-                    "serialization": {"properties": {"encoding": "UTF8", "fieldDelimiter": ","}, "type": "Csv"},
-                }
-            }
-        },
-    ).result()
-    print(response)
-
-
-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_TestOutput.json
-if __name__ == "__main__":
-    main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_stream_analytics_query.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_stream_analytics_query.py
deleted file mode 100644
index f64b563179e2..000000000000
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/test_the_stream_analytics_query.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from azure.identity import DefaultAzureCredential
-from azure.mgmt.streamanalytics import StreamAnalyticsManagementClient
-
-"""
-# PREREQUISITES
-    pip install azure-identity
-    pip install azure-mgmt-streamanalytics
-# USAGE
-    python test_the_stream_analytics_query.py
-
-    Before run the sample, please set the values of the client ID, tenant ID and client secret
-    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
-    AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
-    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
-"""
-
-
-def main():
-    client = StreamAnalyticsManagementClient(
-        credential=DefaultAzureCredential(),
-        subscription_id="56b5e0a9-b645-407d-99b0-c64f86013e3d",
-    )
-
-    response = client.subscriptions.begin_test_query(
-        location="West US",
-        test_query={
-            "diagnostics": {"path": "/pathto/subdirectory", "writeUri": "http://myoutput.com"},
-            "streamingJob": {
-                "location": "West US",
-                "properties": {
-                    "compatibilityLevel": "1.0",
-                    "dataLocale": "en-US",
-                    "eventsLateArrivalMaxDelayInSeconds": 5,
-                    "eventsOutOfOrderMaxDelayInSeconds": 0,
-                    "eventsOutOfOrderPolicy": "Drop",
-                    "functions": [],
-                    "inputs": [
-                        {
-                            "name": "inputtest",
-                            "properties": {
-                                "datasource": {"properties": {"payloadUri": "http://myinput.com"}, "type": "Raw"},
-                                "serialization": {"properties": {"encoding": "UTF8"}, "type": "Json"},
-                                "type": "Stream",
-                            },
-                        }
-                    ],
-                    "outputErrorPolicy": "Drop",
-                    "outputs": [
-                        {
-                            "name": "outputtest",
-                            "properties": {
-                                "datasource": {"properties": {"payloadUri": "http://myoutput.com"}, "type": "Raw"},
-                                "serialization": {"type": "Json"},
-                            },
-                        }
-                    ],
-                    "sku": {"name": "Standard"},
-                    "transformation": {
-                        "name": "transformationtest",
-                        "properties": {"query": "Select Id, Name from inputtest", "streamingUnits": 1},
-                    },
-                },
-                "tags": {"key1": "value1", "key3": "value3", "randomKey": "randomValue"},
-            },
-        },
-    ).result()
-    print(response)
-
-
-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Subscription_TestQuery.json
-if __name__ == "__main__":
-    main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_transformation.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_create.py
similarity index 92%
rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_transformation.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_create.py
index cddb92b19569..da072246b03c 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/create_a_transformation.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_create.py
@@ -14,7 +14,7 @@
     pip install azure-identity
     pip install azure-mgmt-streamanalytics
 # USAGE
-    python create_a_transformation.py
+    python transformation_create.py

     Before run the sample, please set the values of the client ID, tenant ID and client secret
     of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -38,6 +38,6 @@ def main():
     print(response)


-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Transformation_Create.json
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Transformation_Create.json
 if __name__ == "__main__":
     main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_transformation.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_get.py
similarity index 92%
rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_transformation.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_get.py
index c7a1ddc66b79..187f6e490320 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/get_a_transformation.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_get.py
@@ -14,7 +14,7 @@
     pip install azure-identity
     pip install azure-mgmt-streamanalytics
 # USAGE
-    python get_a_transformation.py
+    python transformation_get.py

     Before run the sample, please set the values of the client ID, tenant ID and client secret
     of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -37,6 +37,6 @@ def main():
     print(response)


-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Transformation_Get.json
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Transformation_Get.json
 if __name__ == "__main__":
     main()
diff --git a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_transformation.py b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_update.py
similarity index 92%
rename from sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_transformation.py
rename to sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_update.py
index a7b0b77230b2..0d47b16bdf53 100644
--- a/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/update_a_transformation.py
+++ b/sdk/streamanalytics/azure-mgmt-streamanalytics/generated_samples/transformation_update.py
@@ -14,7 +14,7 @@
     pip install azure-identity
     pip install azure-mgmt-streamanalytics
 # USAGE
-    python update_a_transformation.py
+    python transformation_update.py

     Before run the sample, please set the values of the client ID, tenant ID and client secret
     of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
@@ -38,6 +38,6 @@ def main():
     print(response)


-# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/preview/2021-10-01-preview/examples/Transformation_Update.json
+# x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Transformation_Update.json
 if __name__ == "__main__":
     main()
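Every sample docstring above asks for AZURE_CLIENT_ID, AZURE_TENANT_ID, and AZURE_CLIENT_SECRET to be exported before running; DefaultAzureCredential picks these up through its environment-credential chain. The helper below is a hypothetical pre-flight check, not part of the generated samples, that fails fast with a clear message when any of them is missing:

import os

# Hypothetical pre-flight check for the service-principal variables the
# sample docstrings require; DefaultAzureCredential reads these from the
# environment when authenticating as an AAD application.
REQUIRED_VARS = ("AZURE_CLIENT_ID", "AZURE_TENANT_ID", "AZURE_CLIENT_SECRET")


def check_sample_environment() -> None:
    missing = [name for name in REQUIRED_VARS if not os.environ.get(name)]
    if missing:
        raise SystemExit(
            "Set these environment variables before running the samples: " + ", ".join(missing)
        )


if __name__ == "__main__":
    check_sample_environment()
    print("Environment looks good; any generated sample can be run next.")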