diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 99513128e433..a1fbb0969070 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -217,11 +217,14 @@ # ServiceLabel: %Cognitive - Face # ServiceOwners: @dipidoo @longli0 @ShaoAnLin @leareai @Han-msft +# PRLabel: %Cognitive - Content Understanding +/sdk/contentunderstanding/ @yungshinlintw @bojunehsu @changjian-wang + # PRLabel: %Cognitive - Form Recognizer -/sdk/documentintelligence/ @vkurpad +/sdk/documentintelligence/ @yungshinlintw @bojunehsu # PRLabel: %Cognitive - Form Recognizer -/sdk/formrecognizer/ @vkurpad +/sdk/formrecognizer/ @yungshinlintw @bojunehsu # ServiceLabel: %Cognitive - Form Recognizer # ServiceOwners: @vkurpad diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/.gitignore b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/.gitignore new file mode 100644 index 000000000000..752374e218e7 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/.gitignore @@ -0,0 +1,13 @@ +# Local-only files and temporary scripts (not committed to git) +.local_only/ + +# User-specific configuration files (never commit appsettings.json - they may contain secrets) +**/appsettings.json +!**/appsettings.json.sample + +# Sample output directories (generated by sample execution) +**/sample_output/ + +# Test recordings (SessionRecords) - should not be committed +**/SessionRecords/ + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/Azure.AI.ContentUnderstanding.sln b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/Azure.AI.ContentUnderstanding.sln new file mode 100644 index 000000000000..3c3c9cf35e36 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/Azure.AI.ContentUnderstanding.sln @@ -0,0 +1,30 @@ +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Azure.AI.ContentUnderstanding", "src\Azure.AI.ContentUnderstanding.csproj", "{28FF4005-4467-4E36-92E7-DEA27DEB1519}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Azure.Core.TestFramework", "..\..\core\Azure.Core.TestFramework\src\Azure.Core.TestFramework.csproj", "{C2E8EBF5-F05A-22ED-9231-040E2E2D8446}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Debug|Any CPU.Build.0 = Debug|Any CPU + {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Release|Any CPU.ActiveCfg = Release|Any CPU + {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Release|Any CPU.Build.0 = Release|Any CPU + {C2E8EBF5-F05A-22ED-9231-040E2E2D8446}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C2E8EBF5-F05A-22ED-9231-040E2E2D8446}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C2E8EBF5-F05A-22ED-9231-040E2E2D8446}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C2E8EBF5-F05A-22ED-9231-040E2E2D8446}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {A97F4B90-2591-4689-B1F8-5F21FE6D6CAE} + EndGlobalSection +EndGlobal diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/CHANGELOG.md 
b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/CHANGELOG.md new file mode 100644 index 000000000000..e615675b1305 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/CHANGELOG.md @@ -0,0 +1,14 @@ +# Release History + +## 1.0.0-beta.1 (Unreleased) + +### Features Added +- Initial release of Azure AI Content Understanding client library for .NET +- Added `ContentUnderstandingClient` for analyzing documents, audio, and video content +- Added `AnalyzeResultOperation` class that extends `Operation<AnalyzeResult>` and provides access to the operation ID via the `Id` property + +### Breaking Changes + +### Bugs Fixed + +### Other Changes diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/Directory.Build.props b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/Directory.Build.props new file mode 100644 index 000000000000..1a9611bd4924 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/Directory.Build.props @@ -0,0 +1,6 @@ + + + + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/README.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/README.md new file mode 100644 index 000000000000..b7d0927f2140 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/README.md @@ -0,0 +1,268 @@ +# Azure Content Understanding client library for .NET + +Azure AI Content Understanding is a multimodal AI service that extracts semantic content from documents, audio, and video files. It transforms unstructured content into structured, machine-readable data optimized for retrieval-augmented generation (RAG) and automated workflows. + +Use the client library for Azure AI Content Understanding to: + +* **Extract document content** - Extract text, tables, figures, layout information, and structured markdown from documents (PDF, images, Office documents) +* **Transcribe and analyze audio** - Convert audio content into searchable transcripts with speaker diarization and timing information +* **Analyze video content** - Extract visual frames, transcribe audio tracks, and generate structured summaries from video files +* **Create custom analyzers** - Build domain-specific analyzers for specialized content extraction needs +* **Classify documents** - Automatically categorize and organize documents by type or content + +[Source code][source_code] | [Package (NuGet)] | [API reference documentation] | [Product documentation][product_docs] + +## Getting started + +### Install the package + +Install the client library for .NET with [NuGet][nuget]: + +```dotnetcli +dotnet add package Azure.AI.ContentUnderstanding --prerelease +``` + +### Prerequisites + +> You must have an Azure subscription and a **Microsoft Foundry resource**. To create a Microsoft Foundry resource, follow the steps in the [Azure Content Understanding quickstart][cu_quickstart]. To take advantage of C# 8.0 syntax, it is recommended that you compile with the [.NET Core SDK][dotnet_sdk] 3.0 or higher and a [language version][csharp_lang_version] of `latest`. + +### Configuring a Microsoft Foundry resource + +Before using the Content Understanding SDK, you need to set up a Microsoft Foundry resource and deploy the required models. Content Understanding currently uses Azure OpenAI models such as gpt-4.1 and gpt-4.1-mini, together with the text-embedding-3-large embedding model. + +#### Step 1: Create a Microsoft Foundry resource + +> **Important:** You must create your Microsoft Foundry resource in a region that supports Content Understanding.
For a list of available regions, see [Azure Content Understanding region and language support][cu_region_support]. + +1. Follow the steps in the [Azure Content Understanding quickstart][cu_quickstart] to create a Microsoft Foundry resource in the Azure portal +2. Get your Foundry resource's endpoint URL from Azure Portal: + - Go to [Azure Portal][azure_portal] + - Navigate to your Microsoft Foundry resource + - Go to **Resource Management** > **Keys and Endpoint** + - Copy the **Endpoint** URL (typically `https://.services.ai.azure.com/`) + +**Important: Grant Required Permissions** + +After creating your Microsoft Foundry resource, you must grant yourself the **Cognitive Services User** role to enable API calls for setting default model deployments: + +1. Go to [Azure Portal][azure_portal] +2. Navigate to your Microsoft Foundry resource +3. Go to **Access Control (IAM)** in the left menu +4. Click **Add** > **Add role assignment** +5. Select the **Cognitive Services User** role +6. Assign it to yourself (or the user/service principal that will run the application) + +> **Note:** This role assignment is required even if you are the owner of the resource. Without this role, you will not be able to call the Content Understanding API to configure model deployments for prebuilt analyzers. + +#### Step 2: Deploy required models + +**Important:** The prebuilt analyzers require model deployments. You must deploy these models before using prebuilt analyzers: +- `prebuilt-documentSearch`, `prebuilt-imageSearch`, `prebuilt-audioSearch`, `prebuilt-videoSearch` require **gpt-4.1-mini** and **text-embedding-3-large** +- Other prebuilt analyzers like `prebuilt-invoice`, `prebuilt-receipt` require **gpt-4.1** and **text-embedding-3-large** + +To deploy a model: + +1. In Microsoft Foundry, go to **Deployments** > **Deploy model** > **Deploy base model** +2. Search for and select the model you want to deploy. Currently, prebuilt analyzers require models such as `gpt-4.1`, `gpt-4.1-mini`, and `text-embedding-3-large` +3. Complete the deployment with your preferred settings +4. Note the deployment name you chose (by convention, use the model name as the deployment name, e.g., `gpt-4.1` for the `gpt-4.1` model). You can use any name you prefer, but you'll need to note it for use in Step 3 when configuring model deployments. + +Repeat this process for each model required by your prebuilt analyzers. + +For more information on deploying models, see [Create model deployments in Microsoft Foundry portal][deploy_models_docs]. + +#### Step 3: Configure model deployments (required for prebuilt analyzers) + +> **IMPORTANT:** Before using prebuilt analyzers, you must configure the model deployments. This is a **one-time setup per Microsoft Foundry resource** that maps your deployed models to the prebuilt analyzers. + +You need to configure the default model mappings in your Microsoft Foundry resource. This can be done programmatically using the SDK. The configuration maps your deployed models (currently gpt-4.1, gpt-4.1-mini, and text-embedding-3-large) to the large language models required by prebuilt analyzers. + +To configure model deployments using code, see [Sample 00: Configure model deployment defaults][sample00] for a complete example. 
The sample shows how to: +- Map your deployed models to the models required by prebuilt analyzers +- Retrieve the current default model deployment configuration + +> **Note:** The configuration is persisted in your Microsoft Foundry resource, so you only need to run this once per resource (or whenever you change your deployment names). If you have multiple Microsoft Foundry resources, you need to configure each one separately. + +### Authenticate the client + +To authenticate the client, you need your Microsoft Foundry resource endpoint and credentials. You can use either an API key or Microsoft Entra ID authentication. + +#### Using DefaultAzureCredential + +The simplest way to authenticate is using `DefaultAzureCredential`, which supports multiple authentication methods and works well in both local development and production environments: + +```C# Snippet:CreateContentUnderstandingClient +string endpoint = ""; +var credential = new DefaultAzureCredential(); +var client = new ContentUnderstandingClient(new Uri(endpoint), credential); +``` + +#### Using API key + +You can also authenticate using an API key from your Microsoft Foundry resource: + +```C# Snippet:CreateContentUnderstandingClientApiKey +string endpoint = ""; +string apiKey = ""; +var client = new ContentUnderstandingClient(new Uri(endpoint), new AzureKeyCredential(apiKey)); +``` + +To get your API key: +1. Go to [Azure Portal][azure_portal] +2. Navigate to your Microsoft Foundry resource +3. Go to **Resource Management** > **Keys and Endpoint** +4. Copy one of the **Keys** (Key1 or Key2) + +For more information on authentication, see [Azure Identity client library][azure_identity_readme]. + +## Key concepts + +### Prebuilt analyzers + +Content Understanding provides a rich set of prebuilt analyzers that are ready to use without any configuration. These analyzers are powered by knowledge bases of thousands of real-world document examples, enabling them to understand document structure and adapt to variations in format and content. + +Prebuilt analyzers are organized into several categories: + +* **RAG analyzers** - Optimized for retrieval-augmented generation scenarios with semantic analysis and markdown extraction: + * **`prebuilt-documentSearch`** - Extracts content from documents (PDF, images, Office documents) with layout preservation, table detection, figure analysis, and structured markdown output. Optimized for RAG scenarios. + * **`prebuilt-imageSearch`** - Analyzes standalone images to generate descriptions, extract visual features, and identify objects and scenes within images. Optimized for image understanding and search scenarios. + * **`prebuilt-audioSearch`** - Transcribes audio content with speaker diarization, timing information, and conversation summaries. Supports multilingual transcription. + * **`prebuilt-videoSearch`** - Analyzes video content with visual frame extraction, audio transcription, and structured summaries. Provides temporal alignment of visual and audio content. 
+* **Content extraction analyzers** - Focus on OCR and layout analysis (e.g., `prebuilt-read`, `prebuilt-layout`) +* **Base analyzers** - Fundamental content processing capabilities used as parent analyzers for custom analyzers (e.g., `prebuilt-document`, `prebuilt-image`, `prebuilt-audio`, `prebuilt-video`) +* **Domain-specific analyzers** - Preconfigured analyzers for common document categories including financial documents (invoices, receipts, bank statements), identity documents (passports, driver's licenses), tax forms, mortgage documents, and contracts +* **Utility analyzers** - Specialized tools for schema generation and field extraction (e.g., `prebuilt-documentFieldSchema`, `prebuilt-documentFields`) + +For a complete list of available prebuilt analyzers and their capabilities, see the [Prebuilt analyzers documentation][prebuilt-analyzers-docs]. + +> +### Content types + +The API returns different content types based on the input: + +* **`document`** - For document files (PDF, images, Office documents). Contains pages, tables, figures, paragraphs, and markdown representation. +* **`audioVisual`** - For audio and video files. Contains transcript phrases, timing information, and for video, visual frame references. + +### Asynchronous operations + +Content Understanding operations are asynchronous long-running operations. The workflow is: + +1. **Begin Analysis** - Start the analysis operation (returns immediately with an operation location) +2. **Poll for Results** - Poll the operation location until the analysis completes +3. **Process Results** - Extract and display the structured results + +The SDK provides `Operation` types that handle polling automatically when using `WaitUntil.Completed`. For analysis operations, the SDK returns `AnalyzeResultOperation`, which extends `Operation` and provides access to the operation ID via the `Id` property. This operation ID can be used with `GetResultFile*` and `DeleteResult*` methods. + +### Main classes + +* **`ContentUnderstandingClient`** - The main client for analyzing content, as well as creating, managing, and configuring analyzers +* **`AnalyzeResult`** - Contains the structured results of an analysis operation, including content elements, markdown, and metadata +* **`AnalyzeResultOperation`** - A long-running operation wrapper for analysis results that provides access to the operation ID + +### Thread safety + +We guarantee that all client instance methods are thread-safe and independent of each other ([guideline][thread_safety_guideline]). This ensures that the recommendation of reusing client instances is always safe, even across threads. + +### Additional concepts + +[Client options][client_options] | +[Accessing the response][accessing_response] | +[Long-running operations][long_running_operations] | +[Handling failures][handling_failures] | +[Diagnostics][diagnostics] | +[Mocking][mocking] | +[Client lifetime][client_lifetime] + + +## Examples + +You can familiarize yourself with different APIs using [Samples][samples_directory]. 
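
As a quick end-to-end illustration of the asynchronous analysis workflow described above, the sketch below authenticates with `DefaultAzureCredential`, runs the `prebuilt-documentSearch` analyzer against a document URL, waits for the long-running operation to complete, and reads the result. The endpoint and document URL are placeholders, and the exact `Analyze` overload and result properties you need may differ slightly from this sketch; refer to the samples below for complete, verified code.

```csharp
using System;
using Azure;
using Azure.AI.ContentUnderstanding;
using Azure.Identity;

// Placeholder endpoint; use your Microsoft Foundry resource endpoint.
var client = new ContentUnderstandingClient(
    new Uri("https://<your-resource>.services.ai.azure.com/"),
    new DefaultAzureCredential());

// Start the analysis and wait for the long-running operation to complete.
// The document URL below is a placeholder.
AnalyzeResultOperation operation = client.Analyze(
    WaitUntil.Completed,
    "prebuilt-documentSearch",
    inputs: new[] { new AnalyzeInput { Url = new Uri("https://example.com/sample.pdf") } });

// The operation ID can be used later with the GetResultFile* and DeleteResult* methods.
Console.WriteLine($"Operation ID: {operation.Id}");

// Inspect the structured result.
AnalyzeResult result = operation.Value;
Console.WriteLine($"Analyzer: {result.AnalyzerId}, content elements: {result.Contents.Count}");
```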
+ +The samples demonstrate: + +* **Configuration** - Configure model deployment defaults for prebuilt analyzers +* **Document Content Extraction** - Extract structured markdown content from PDFs and images using `prebuilt-documentSearch`, optimized for RAG (Retrieval-Augmented Generation) applications +* **Domain-Specific Analysis** - Extract structured fields from invoices using `prebuilt-invoice` +* **Advanced Document Features** - Extract charts, hyperlinks, formulas, and annotations from documents +* **Custom Analyzers** - Create custom analyzers with field schemas for specialized extraction needs +* **Document Classification** - Create and use classifiers to categorize documents +* **Analyzer Management** - Get, list, update, copy, and delete analyzers +* **Result Management** - Retrieve result files from video analysis and delete analysis results + +See the [samples directory][samples_directory] for complete examples. + +## Troubleshooting + +### Common issues + +**Error: "Access denied due to invalid subscription key or wrong API endpoint"** +- Verify your endpoint URL is correct and includes the trailing slash +- Ensure your API key is valid or that your Microsoft Entra ID credentials have the correct permissions +- Make sure you have the **Cognitive Services User** role assigned to your account + +**Error: "Model deployment not found" or "Default model deployment not configured"** +- Ensure you have deployed the required models (gpt-4.1, gpt-4.1-mini, text-embedding-3-large) in Microsoft Foundry +- Verify you have configured the default model deployments (see [Configure Model Deployments](#step-3-configure-model-deployments-required-for-prebuilt-analyzers)) +- Check that your deployment names match what you configured in the defaults + +**Error: "Operation failed" or timeout** +- Content Understanding operations are asynchronous and may take time to complete +- Ensure you are properly polling for results using `WaitUntil.Completed` or manual polling +- Check the operation status for more details about the failure + +### Enable logging + +To enable logging for debugging, configure logging in your application: + +```csharp +using Azure.Core.Diagnostics; + +// Enable console logging +using AzureEventSourceListener listener = AzureEventSourceListener.CreateConsoleLogger(); +``` + +For more information, see [Diagnostics samples][diagnostics]. + +## Next steps + +* Explore the [samples directory][samples_directory] for complete code examples +* Read the [Azure AI Content Understanding documentation][product_docs] for detailed service information + +## Contributing + +This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit [cla.microsoft.com][cla]. + +When you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct][code_of_conduct]. For more information see the [Code of Conduct FAQ][code_of_conduct_faq] or contact [opencode@microsoft.com][opencode_email] with any additional questions or comments. 
+ + +[source_code]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src + +[product_docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[nuget]: https://www.nuget.org/ +[azure_subscription]: https://azure.microsoft.com/free/dotnet/ +[cu_quickstart]: https://learn.microsoft.com/azure/ai-services/content-understanding/quickstart/use-rest-api?tabs=portal%2Cdocument +[cu_region_support]: https://learn.microsoft.com/azure/ai-services/content-understanding/language-region-support +[dotnet_sdk]: https://dotnet.microsoft.com/download +[csharp_lang_version]: https://learn.microsoft.com/dotnet/csharp/language-reference/configure-language-version#override-a-default +[azure_portal]: https://portal.azure.com/ +[deploy_models_docs]: https://learn.microsoft.com/azure/ai-studio/how-to/deploy-models-openai +[azure_identity_readme]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/identity/Azure.Identity/README.md +[thread_safety_guideline]: https://azure.github.io/azure-sdk/dotnet_introduction.html#dotnet-service-methods-thread-safety +[client_options]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#configuring-service-clients-using-clientoptions +[accessing_response]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#accessing-http-response-details-using-responset +[long_running_operations]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#consuming-long-running-operations-using-operationt +[handling_failures]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#reporting-errors-requestfailedexception +[diagnostics]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/samples/Diagnostics.md +[mocking]: https://learn.microsoft.com/dotnet/azure/sdk/unit-testing-mocking +[client_lifetime]: https://devblogs.microsoft.com/azure-sdk/lifetime-management-and-thread-safety-guarantees-of-azure-sdk-net-clients/ +[samples_directory]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[prebuilt-analyzers-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/prebuilt-analyzers +[cla]: https://cla.microsoft.com +[code_of_conduct]: https://opensource.microsoft.com/codeofconduct/ +[code_of_conduct_faq]: https://opensource.microsoft.com/codeofconduct/faq/ +[opencode_email]: mailto:opencode@microsoft.com +[style-guide-msft]: https://learn.microsoft.com/style-guide/capitalization +[style-guide-cloud]: https://aka.ms/azsdk/cloud-style-guide diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/api/Azure.AI.ContentUnderstanding.net8.0.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/api/Azure.AI.ContentUnderstanding.net8.0.cs new file mode 100644 index 000000000000..67ab757b2815 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/api/Azure.AI.ContentUnderstanding.net8.0.cs @@ -0,0 +1,1276 @@ +namespace Azure.AI.ContentUnderstanding +{ + public partial class AnalyzeInput : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public AnalyzeInput() { } + public System.BinaryData Data { get { throw null; } set { } } + public string InputRange { get 
{ throw null; } set { } } + public string MimeType { get { throw null; } set { } } + public string Name { get { throw null; } set { } } + public System.Uri Url { get { throw null; } set { } } + protected virtual Azure.AI.ContentUnderstanding.AnalyzeInput JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.AnalyzeInput PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.AnalyzeInput System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.AnalyzeInput System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AnalyzeResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal AnalyzeResult() { } + public string AnalyzerId { get { throw null; } } + public string ApiVersion { get { throw null; } } + public System.Collections.Generic.IList Contents { get { throw null; } } + public System.DateTimeOffset? 
CreatedAt { get { throw null; } } + public string StringEncoding { get { throw null; } } + public System.Collections.Generic.IList Warnings { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.AnalyzeResult JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.AnalyzeResult PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.AnalyzeResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.AnalyzeResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AnalyzeResultOperation : Azure.Operation + { + protected AnalyzeResultOperation() { } + public override bool HasCompleted { get { throw null; } } + public override bool HasValue { get { throw null; } } + public override string Id { get { throw null; } } + public override Azure.AI.ContentUnderstanding.AnalyzeResult Value { get { throw null; } } + public override Azure.Response GetRawResponse() { throw null; } + public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public override System.Threading.Tasks.ValueTask UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public override Azure.Response WaitForCompletion(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public override Azure.Response WaitForCompletion(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public override System.Threading.Tasks.ValueTask> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public override System.Threading.Tasks.ValueTask> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AnnotationFormat : System.IEquatable + { + private readonly object _dummy; + 
private readonly int _dummyPrimitive; + public AnnotationFormat(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.AnnotationFormat Markdown { get { throw null; } } + public static Azure.AI.ContentUnderstanding.AnnotationFormat None { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.AnnotationFormat other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.AnnotationFormat left, Azure.AI.ContentUnderstanding.AnnotationFormat right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.AnnotationFormat (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.AnnotationFormat? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.AnnotationFormat left, Azure.AI.ContentUnderstanding.AnnotationFormat right) { throw null; } + public override string ToString() { throw null; } + } + public partial class ArrayField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal ArrayField() { } + public int Count { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentField this[int index] { get { throw null; } } + public System.Collections.Generic.IList ValueArray { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ArrayField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ArrayField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AudioVisualContent : Azure.AI.ContentUnderstanding.MediaContent, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal AudioVisualContent() { } + public System.Collections.Generic.IList 
CameraShotTimesMs { get { throw null; } } + public long EndTimeMs { get { throw null; } } + public int? Height { get { throw null; } } + public System.Collections.Generic.IList KeyFrameTimesMs { get { throw null; } } + public System.Collections.Generic.IList Segments { get { throw null; } } + public long StartTimeMs { get { throw null; } } + public System.Collections.Generic.IList TranscriptPhrases { get { throw null; } } + public int? Width { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.MediaContent JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.MediaContent PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.AudioVisualContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.AudioVisualContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AudioVisualContentSegment : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal AudioVisualContentSegment() { } + public string Category { get { throw null; } } + public long EndTimeMs { get { throw null; } } + public string SegmentId { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public long StartTimeMs { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.AudioVisualContentSegment JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.AudioVisualContentSegment PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.AudioVisualContentSegment System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void 
System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.AudioVisualContentSegment System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AzureAIContentUnderstandingContext : System.ClientModel.Primitives.ModelReaderWriterContext + { + internal AzureAIContentUnderstandingContext() { } + public static Azure.AI.ContentUnderstanding.AzureAIContentUnderstandingContext Default { get { throw null; } } + protected override bool TryGetTypeBuilderCore(System.Type type, out System.ClientModel.Primitives.ModelReaderWriterTypeBuilder builder) { throw null; } + } + public partial class BooleanField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal BooleanField() { } + public bool? ValueBoolean { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.BooleanField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.BooleanField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ChartFormat : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ChartFormat(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.ChartFormat ChartJs { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ChartFormat Markdown { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.ChartFormat other) { throw 
null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.ChartFormat left, Azure.AI.ContentUnderstanding.ChartFormat right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ChartFormat (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ChartFormat? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.ChartFormat left, Azure.AI.ContentUnderstanding.ChartFormat right) { throw null; } + public override string ToString() { throw null; } + } + public partial class ContentAnalyzer : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ContentAnalyzer() { } + public string AnalyzerId { get { throw null; } } + public string BaseAnalyzerId { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.ContentAnalyzerConfig Config { get { throw null; } set { } } + public System.DateTimeOffset CreatedAt { get { throw null; } } + public string Description { get { throw null; } set { } } + public bool? DynamicFieldSchema { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.ContentFieldSchema FieldSchema { get { throw null; } set { } } + public System.Collections.Generic.IList KnowledgeSources { get { throw null; } } + public System.DateTimeOffset LastModifiedAt { get { throw null; } } + public System.Collections.Generic.IDictionary Models { get { throw null; } } + public Azure.AI.ContentUnderstanding.ProcessingLocation? 
ProcessingLocation { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.ContentAnalyzerStatus Status { get { throw null; } } + public Azure.AI.ContentUnderstanding.SupportedModels SupportedModels { get { throw null; } } + public System.Collections.Generic.IDictionary Tags { get { throw null; } } + public System.Collections.Generic.IReadOnlyList Warnings { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.ContentAnalyzer JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + public static explicit operator Azure.AI.ContentUnderstanding.ContentAnalyzer (Azure.Response response) { throw null; } + public static implicit operator Azure.Core.RequestContent (Azure.AI.ContentUnderstanding.ContentAnalyzer contentAnalyzer) { throw null; } + protected virtual Azure.AI.ContentUnderstanding.ContentAnalyzer PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentAnalyzer System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentAnalyzer System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class ContentAnalyzerConfig : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ContentAnalyzerConfig() { } + public Azure.AI.ContentUnderstanding.AnnotationFormat? AnnotationFormat { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.ChartFormat? ChartFormat { get { throw null; } set { } } + public System.Collections.Generic.IDictionary ContentCategories { get { throw null; } } + public bool? DisableFaceBlurring { get { throw null; } set { } } + public bool? EnableFigureAnalysis { get { throw null; } set { } } + public bool? EnableFigureDescription { get { throw null; } set { } } + public bool? EnableFormula { get { throw null; } set { } } + public bool? EnableLayout { get { throw null; } set { } } + public bool? EnableOcr { get { throw null; } set { } } + public bool? EnableSegment { get { throw null; } set { } } + public bool? EstimateFieldSourceAndConfidence { get { throw null; } set { } } + public System.Collections.Generic.IList Locales { get { throw null; } } + public bool? OmitContent { get { throw null; } set { } } + public bool? ReturnDetails { get { throw null; } set { } } + public bool? 
SegmentPerPage { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.TableFormat? TableFormat { get { throw null; } set { } } + protected virtual Azure.AI.ContentUnderstanding.ContentAnalyzerConfig JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.ContentAnalyzerConfig PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentAnalyzerConfig System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentAnalyzerConfig System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ContentAnalyzerStatus : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ContentAnalyzerStatus(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentAnalyzerStatus Creating { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentAnalyzerStatus Deleting { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentAnalyzerStatus Failed { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentAnalyzerStatus Ready { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.ContentAnalyzerStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.ContentAnalyzerStatus left, Azure.AI.ContentUnderstanding.ContentAnalyzerStatus right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ContentAnalyzerStatus (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ContentAnalyzerStatus? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.ContentAnalyzerStatus left, Azure.AI.ContentUnderstanding.ContentAnalyzerStatus right) { throw null; } + public override string ToString() { throw null; } + } + public partial class ContentCategory : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ContentCategory() { } + public Azure.AI.ContentUnderstanding.ContentAnalyzer Analyzer { get { throw null; } set { } } + public string AnalyzerId { get { throw null; } set { } } + public string Description { get { throw null; } set { } } + protected virtual Azure.AI.ContentUnderstanding.ContentCategory JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.ContentCategory PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentCategory System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentCategory System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public abstract partial class ContentField : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal ContentField() { } + public float? 
Confidence { get { throw null; } } + public string Source { get { throw null; } } + public System.Collections.Generic.IList Spans { get { throw null; } } + public object Value { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class ContentFieldDefinition : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ContentFieldDefinition() { } + public string Description { get { throw null; } set { } } + public System.Collections.Generic.IList Enum { get { throw null; } } + public System.Collections.Generic.IDictionary EnumDescriptions { get { throw null; } } + public bool? EstimateSourceAndConfidence { get { throw null; } set { } } + public System.Collections.Generic.IList Examples { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentFieldDefinition ItemDefinition { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.GenerationMethod? Method { get { throw null; } set { } } + public System.Collections.Generic.IDictionary Properties { get { throw null; } } + public string Ref { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.ContentFieldType? 
Type { get { throw null; } set { } } + protected virtual Azure.AI.ContentUnderstanding.ContentFieldDefinition JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.ContentFieldDefinition PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentFieldDefinition System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentFieldDefinition System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class ContentFieldSchema : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ContentFieldSchema(System.Collections.Generic.IDictionary fields) { } + public System.Collections.Generic.IDictionary Definitions { get { throw null; } } + public string Description { get { throw null; } set { } } + public System.Collections.Generic.IDictionary Fields { get { throw null; } } + public string Name { get { throw null; } set { } } + protected virtual Azure.AI.ContentUnderstanding.ContentFieldSchema JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.ContentFieldSchema PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentFieldSchema System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentFieldSchema System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string 
System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ContentFieldType : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ContentFieldType(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentFieldType Array { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Boolean { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Date { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Integer { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Json { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Number { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Object { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType String { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Time { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.ContentFieldType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.ContentFieldType left, Azure.AI.ContentUnderstanding.ContentFieldType right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ContentFieldType (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ContentFieldType? 
(string value) { throw null; }
+        public static bool operator !=(Azure.AI.ContentUnderstanding.ContentFieldType left, Azure.AI.ContentUnderstanding.ContentFieldType right) { throw null; }
+        public override string ToString() { throw null; }
+    }
+    public partial class ContentSpan : System.ClientModel.Primitives.IJsonModel<Azure.AI.ContentUnderstanding.ContentSpan>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.ContentSpan>
+    {
+        internal ContentSpan() { }
+        public int Length { get { throw null; } }
+        public int Offset { get { throw null; } }
+        protected virtual Azure.AI.ContentUnderstanding.ContentSpan JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        protected virtual Azure.AI.ContentUnderstanding.ContentSpan PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        Azure.AI.ContentUnderstanding.ContentSpan System.ClientModel.Primitives.IJsonModel<Azure.AI.ContentUnderstanding.ContentSpan>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel<Azure.AI.ContentUnderstanding.ContentSpan>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.ContentUnderstanding.ContentSpan System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.ContentSpan>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.ContentSpan>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.ContentSpan>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class ContentUnderstandingClient
+    {
+        protected ContentUnderstandingClient() { }
+        public ContentUnderstandingClient(System.Uri endpoint, Azure.AzureKeyCredential credential) { }
+        public ContentUnderstandingClient(System.Uri endpoint, Azure.AzureKeyCredential credential, Azure.AI.ContentUnderstanding.ContentUnderstandingClientOptions options) { }
+        public ContentUnderstandingClient(System.Uri endpoint, Azure.Core.TokenCredential credential) { }
+        public ContentUnderstandingClient(System.Uri endpoint, Azure.Core.TokenCredential credential, Azure.AI.ContentUnderstanding.ContentUnderstandingClientOptions options) { }
+        public virtual Azure.Core.Pipeline.HttpPipeline Pipeline { get { throw null; } }
+        public virtual Azure.Operation<System.BinaryData> Analyze(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, string stringEncoding = null, string processingLocation = null, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.AI.ContentUnderstanding.AnalyzeResultOperation Analyze(Azure.WaitUntil waitUntil, string analyzerId, System.Collections.Generic.IEnumerable<Azure.AI.ContentUnderstanding.AnalyzeInput>? inputs = null, System.Collections.Generic.IDictionary<string, string>? modelDeployments = null, Azure.AI.ContentUnderstanding.ProcessingLocation? 
processingLocation = default(Azure.AI.ContentUnderstanding.ProcessingLocation?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Operation<System.BinaryData>> AnalyzeAsync(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, string stringEncoding = null, string processingLocation = null, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.AI.ContentUnderstanding.AnalyzeResultOperation> AnalyzeAsync(Azure.WaitUntil waitUntil, string analyzerId, System.Collections.Generic.IEnumerable<Azure.AI.ContentUnderstanding.AnalyzeInput>? inputs = null, System.Collections.Generic.IDictionary<string, string>? modelDeployments = null, Azure.AI.ContentUnderstanding.ProcessingLocation? processingLocation = default(Azure.AI.ContentUnderstanding.ProcessingLocation?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Operation<System.BinaryData> AnalyzeBinary(Azure.WaitUntil waitUntil, string analyzerId, string contentType, Azure.Core.RequestContent content, string stringEncoding = null, string processingLocation = null, string inputRange = null, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.AI.ContentUnderstanding.AnalyzeResultOperation AnalyzeBinary(Azure.WaitUntil waitUntil, string analyzerId, string contentType, System.BinaryData binaryInput, string? stringEncoding = null, Azure.AI.ContentUnderstanding.ProcessingLocation? processingLocation = default(Azure.AI.ContentUnderstanding.ProcessingLocation?), string? inputRange = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Operation<System.BinaryData>> AnalyzeBinaryAsync(Azure.WaitUntil waitUntil, string analyzerId, string contentType, Azure.Core.RequestContent content, string stringEncoding = null, string processingLocation = null, string inputRange = null, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.AI.ContentUnderstanding.AnalyzeResultOperation> AnalyzeBinaryAsync(Azure.WaitUntil waitUntil, string analyzerId, string contentType, System.BinaryData binaryInput, string? stringEncoding = null, Azure.AI.ContentUnderstanding.ProcessingLocation? processingLocation = default(Azure.AI.ContentUnderstanding.ProcessingLocation?), string? inputRange = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Operation<System.BinaryData> CopyAnalyzer(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, bool? allowReplace = default(bool?), Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Operation<Azure.AI.ContentUnderstanding.ContentAnalyzer> CopyAnalyzer(Azure.WaitUntil waitUntil, string analyzerId, string sourceAnalyzerId, string sourceAzureResourceId = null, string sourceRegion = null, bool? allowReplace = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Operation<System.BinaryData>> CopyAnalyzerAsync(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, bool? allowReplace = default(bool?), Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Operation<Azure.AI.ContentUnderstanding.ContentAnalyzer>> CopyAnalyzerAsync(Azure.WaitUntil waitUntil, string analyzerId, string sourceAnalyzerId, string sourceAzureResourceId = null, string sourceRegion = null, bool? 
allowReplace = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Operation<Azure.AI.ContentUnderstanding.ContentAnalyzer> CreateAnalyzer(Azure.WaitUntil waitUntil, string analyzerId, Azure.AI.ContentUnderstanding.ContentAnalyzer resource, bool? allowReplace = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Operation<System.BinaryData> CreateAnalyzer(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, bool? allowReplace = default(bool?), Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Operation<Azure.AI.ContentUnderstanding.ContentAnalyzer>> CreateAnalyzerAsync(Azure.WaitUntil waitUntil, string analyzerId, Azure.AI.ContentUnderstanding.ContentAnalyzer resource, bool? allowReplace = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Operation<System.BinaryData>> CreateAnalyzerAsync(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, bool? allowReplace = default(bool?), Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response DeleteAnalyzer(string analyzerId, Azure.RequestContext context) { throw null; }
+        public virtual Azure.Response DeleteAnalyzer(string analyzerId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response> DeleteAnalyzerAsync(string analyzerId, Azure.RequestContext context) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response> DeleteAnalyzerAsync(string analyzerId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response DeleteResult(string operationId, Azure.RequestContext context) { throw null; }
+        public virtual Azure.Response DeleteResult(string operationId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response> DeleteResultAsync(string operationId, Azure.RequestContext context) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response> DeleteResultAsync(string operationId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response GetAnalyzer(string analyzerId, Azure.RequestContext context) { throw null; }
+        public virtual Azure.Response<Azure.AI.ContentUnderstanding.ContentAnalyzer> GetAnalyzer(string analyzerId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response> GetAnalyzerAsync(string analyzerId, Azure.RequestContext context) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response<Azure.AI.ContentUnderstanding.ContentAnalyzer>> GetAnalyzerAsync(string analyzerId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Pageable<System.BinaryData> GetAnalyzers(Azure.RequestContext context) { throw null; }
+        public virtual Azure.Pageable<Azure.AI.ContentUnderstanding.ContentAnalyzer> GetAnalyzers(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.AsyncPageable<System.BinaryData> GetAnalyzersAsync(Azure.RequestContext context) { throw null; }
+        public virtual Azure.AsyncPageable<Azure.AI.ContentUnderstanding.ContentAnalyzer> GetAnalyzersAsync(System.Threading.CancellationToken cancellationToken = 
default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response GetDefaults(Azure.RequestContext context) { throw null; }
+        public virtual Azure.Response<Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults> GetDefaults(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response> GetDefaultsAsync(Azure.RequestContext context) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response<Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults>> GetDefaultsAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response GetResultFile(string operationId, string path, Azure.RequestContext context) { throw null; }
+        public virtual Azure.Response<System.BinaryData> GetResultFile(string operationId, string path, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response> GetResultFileAsync(string operationId, string path, Azure.RequestContext context) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response<System.BinaryData>> GetResultFileAsync(string operationId, string path, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response GrantCopyAuthorization(string analyzerId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response<Azure.AI.ContentUnderstanding.CopyAuthorization> GrantCopyAuthorization(string analyzerId, string targetAzureResourceId, string targetRegion = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response> GrantCopyAuthorizationAsync(string analyzerId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response<Azure.AI.ContentUnderstanding.CopyAuthorization>> GrantCopyAuthorizationAsync(string analyzerId, string targetAzureResourceId, string targetRegion = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response UpdateAnalyzer(string analyzerId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response> UpdateAnalyzerAsync(string analyzerId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response UpdateDefaults(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task<Azure.Response> UpdateDefaultsAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+    }
+    public static partial class ContentUnderstandingClientExtensions
+    {
+        public static Azure.Response UpdateAnalyzer(this Azure.AI.ContentUnderstanding.ContentUnderstandingClient client, string analyzerId, Azure.AI.ContentUnderstanding.ContentAnalyzer resource, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public static System.Threading.Tasks.Task<Azure.Response> UpdateAnalyzerAsync(this Azure.AI.ContentUnderstanding.ContentUnderstandingClient client, string analyzerId, Azure.AI.ContentUnderstanding.ContentAnalyzer resource, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public static Azure.Response<Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults> UpdateDefaults(this 
Azure.AI.ContentUnderstanding.ContentUnderstandingClient client, System.Collections.Generic.IDictionary modelDeployments, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task> UpdateDefaultsAsync(this Azure.AI.ContentUnderstanding.ContentUnderstandingClient client, System.Collections.Generic.IDictionary modelDeployments, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + } + public partial class ContentUnderstandingClientOptions : Azure.Core.ClientOptions + { + public ContentUnderstandingClientOptions(Azure.AI.ContentUnderstanding.ContentUnderstandingClientOptions.ServiceVersion version = Azure.AI.ContentUnderstanding.ContentUnderstandingClientOptions.ServiceVersion.V2025_11_01) { } + public enum ServiceVersion + { + V2025_11_01 = 1, + } + } + public partial class ContentUnderstandingDefaults : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal ContentUnderstandingDefaults() { } + public System.Collections.Generic.IDictionary ModelDeployments { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + public static explicit operator Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults (Azure.Response response) { throw null; } + protected virtual Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public static partial class ContentUnderstandingModelFactory + { + public static Azure.AI.ContentUnderstanding.AnalyzeInput AnalyzeInput(System.Uri url = null, System.BinaryData data = null, string name = null, string mimeType = null, string inputRange = null) { throw null; } + public static Azure.AI.ContentUnderstanding.AnalyzeResult AnalyzeResult(string analyzerId = null, string apiVersion = null, System.DateTimeOffset? 
createdAt = default(System.DateTimeOffset?), System.Collections.Generic.IEnumerable warnings = null, string stringEncoding = null, System.Collections.Generic.IEnumerable contents = null) { throw null; } + public static Azure.AI.ContentUnderstanding.ArrayField ArrayField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, System.Collections.Generic.IEnumerable valueArray = null) { throw null; } + public static Azure.AI.ContentUnderstanding.AudioVisualContent AudioVisualContent(string mimeType = null, string analyzerId = null, string category = null, string path = null, string markdown = null, System.Collections.Generic.IDictionary fields = null, long startTimeMs = (long)0, long endTimeMs = (long)0, int? width = default(int?), int? height = default(int?), System.Collections.Generic.IEnumerable cameraShotTimesMs = null, System.Collections.Generic.IEnumerable keyFrameTimesMs = null, System.Collections.Generic.IEnumerable transcriptPhrases = null, System.Collections.Generic.IEnumerable segments = null) { throw null; } + public static Azure.AI.ContentUnderstanding.AudioVisualContentSegment AudioVisualContentSegment(string segmentId = null, string category = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, long startTimeMs = (long)0, long endTimeMs = (long)0) { throw null; } + public static Azure.AI.ContentUnderstanding.BooleanField BooleanField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, bool? valueBoolean = default(bool?)) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentAnalyzer ContentAnalyzer(string analyzerId = null, string description = null, System.Collections.Generic.IDictionary tags = null, Azure.AI.ContentUnderstanding.ContentAnalyzerStatus status = default(Azure.AI.ContentUnderstanding.ContentAnalyzerStatus), System.DateTimeOffset createdAt = default(System.DateTimeOffset), System.DateTimeOffset lastModifiedAt = default(System.DateTimeOffset), System.Collections.Generic.IEnumerable warnings = null, string baseAnalyzerId = null, Azure.AI.ContentUnderstanding.ContentAnalyzerConfig config = null, Azure.AI.ContentUnderstanding.ContentFieldSchema fieldSchema = null, bool? dynamicFieldSchema = default(bool?), Azure.AI.ContentUnderstanding.ProcessingLocation? processingLocation = default(Azure.AI.ContentUnderstanding.ProcessingLocation?), System.Collections.Generic.IEnumerable knowledgeSources = null, System.Collections.Generic.IDictionary models = null, Azure.AI.ContentUnderstanding.SupportedModels supportedModels = null) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentAnalyzerConfig ContentAnalyzerConfig(bool? returnDetails = default(bool?), System.Collections.Generic.IEnumerable locales = null, bool? enableOcr = default(bool?), bool? enableLayout = default(bool?), bool? enableFigureDescription = default(bool?), bool? enableFigureAnalysis = default(bool?), bool? enableFormula = default(bool?), Azure.AI.ContentUnderstanding.TableFormat? tableFormat = default(Azure.AI.ContentUnderstanding.TableFormat?), Azure.AI.ContentUnderstanding.ChartFormat? chartFormat = default(Azure.AI.ContentUnderstanding.ChartFormat?), Azure.AI.ContentUnderstanding.AnnotationFormat? annotationFormat = default(Azure.AI.ContentUnderstanding.AnnotationFormat?), bool? disableFaceBlurring = default(bool?), bool? estimateFieldSourceAndConfidence = default(bool?), System.Collections.Generic.IDictionary contentCategories = null, bool? 
enableSegment = default(bool?), bool? segmentPerPage = default(bool?), bool? omitContent = default(bool?)) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentCategory ContentCategory(string description = null, string analyzerId = null, Azure.AI.ContentUnderstanding.ContentAnalyzer analyzer = null) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentField ContentField(string type = null, System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentFieldDefinition ContentFieldDefinition(Azure.AI.ContentUnderstanding.GenerationMethod? method = default(Azure.AI.ContentUnderstanding.GenerationMethod?), Azure.AI.ContentUnderstanding.ContentFieldType? type = default(Azure.AI.ContentUnderstanding.ContentFieldType?), string description = null, Azure.AI.ContentUnderstanding.ContentFieldDefinition itemDefinition = null, System.Collections.Generic.IDictionary properties = null, System.Collections.Generic.IEnumerable examples = null, System.Collections.Generic.IEnumerable @enum = null, System.Collections.Generic.IDictionary enumDescriptions = null, string @ref = null, bool? estimateSourceAndConfidence = default(bool?)) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentFieldSchema ContentFieldSchema(string name = null, string description = null, System.Collections.Generic.IDictionary fields = null, System.Collections.Generic.IDictionary definitions = null) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentSpan ContentSpan(int offset = 0, int length = 0) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults ContentUnderstandingDefaults(System.Collections.Generic.IDictionary modelDeployments = null) { throw null; } + public static Azure.AI.ContentUnderstanding.CopyAuthorization CopyAuthorization(string source = null, string targetAzureResourceId = null, System.DateTimeOffset expiresAt = default(System.DateTimeOffset)) { throw null; } + public static Azure.AI.ContentUnderstanding.DateField DateField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, System.DateTimeOffset? valueDate = default(System.DateTimeOffset?)) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentAnnotation DocumentAnnotation(string id = null, Azure.AI.ContentUnderstanding.DocumentAnnotationKind kind = default(Azure.AI.ContentUnderstanding.DocumentAnnotationKind), System.Collections.Generic.IEnumerable spans = null, string source = null, System.Collections.Generic.IEnumerable comments = null, string author = null, System.DateTimeOffset? createdAt = default(System.DateTimeOffset?), System.DateTimeOffset? lastModifiedAt = default(System.DateTimeOffset?), System.Collections.Generic.IEnumerable tags = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationComment DocumentAnnotationComment(string message = null, string author = null, System.DateTimeOffset? createdAt = default(System.DateTimeOffset?), System.DateTimeOffset? 
lastModifiedAt = default(System.DateTimeOffset?), System.Collections.Generic.IEnumerable tags = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentBarcode DocumentBarcode(Azure.AI.ContentUnderstanding.DocumentBarcodeKind kind = default(Azure.AI.ContentUnderstanding.DocumentBarcodeKind), string value = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, float? confidence = default(float?)) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentCaption DocumentCaption(string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentChartFigure DocumentChartFigure(string id = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null, Azure.AI.ContentUnderstanding.DocumentCaption caption = null, System.Collections.Generic.IEnumerable footnotes = null, string description = null, Azure.AI.ContentUnderstanding.SemanticRole? role = default(Azure.AI.ContentUnderstanding.SemanticRole?), System.Collections.Generic.IDictionary content = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentContent DocumentContent(string mimeType = null, string analyzerId = null, string category = null, string path = null, string markdown = null, System.Collections.Generic.IDictionary fields = null, int startPageNumber = 0, int endPageNumber = 0, Azure.AI.ContentUnderstanding.LengthUnit? unit = default(Azure.AI.ContentUnderstanding.LengthUnit?), System.Collections.Generic.IEnumerable pages = null, System.Collections.Generic.IEnumerable paragraphs = null, System.Collections.Generic.IEnumerable sections = null, System.Collections.Generic.IEnumerable tables = null, System.Collections.Generic.IEnumerable figures = null, System.Collections.Generic.IEnumerable annotations = null, System.Collections.Generic.IEnumerable hyperlinks = null, System.Collections.Generic.IEnumerable segments = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentContentSegment DocumentContentSegment(string segmentId = null, string category = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, int startPageNumber = 0, int endPageNumber = 0) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentFigure DocumentFigure(string kind = null, string id = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null, Azure.AI.ContentUnderstanding.DocumentCaption caption = null, System.Collections.Generic.IEnumerable footnotes = null, string description = null, Azure.AI.ContentUnderstanding.SemanticRole? role = default(Azure.AI.ContentUnderstanding.SemanticRole?)) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentFootnote DocumentFootnote(string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentFormula DocumentFormula(Azure.AI.ContentUnderstanding.DocumentFormulaKind kind = default(Azure.AI.ContentUnderstanding.DocumentFormulaKind), string value = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, float? 
confidence = default(float?)) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentHyperlink DocumentHyperlink(string content = null, string url = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, string source = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentLine DocumentLine(string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentMermaidFigure DocumentMermaidFigure(string id = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null, Azure.AI.ContentUnderstanding.DocumentCaption caption = null, System.Collections.Generic.IEnumerable footnotes = null, string description = null, Azure.AI.ContentUnderstanding.SemanticRole? role = default(Azure.AI.ContentUnderstanding.SemanticRole?), string content = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentPage DocumentPage(int pageNumber = 0, float? width = default(float?), float? height = default(float?), System.Collections.Generic.IEnumerable spans = null, float? angle = default(float?), System.Collections.Generic.IEnumerable words = null, System.Collections.Generic.IEnumerable lines = null, System.Collections.Generic.IEnumerable barcodes = null, System.Collections.Generic.IEnumerable formulas = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentParagraph DocumentParagraph(Azure.AI.ContentUnderstanding.SemanticRole? role = default(Azure.AI.ContentUnderstanding.SemanticRole?), string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentSection DocumentSection(Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentTable DocumentTable(int rowCount = 0, int columnCount = 0, System.Collections.Generic.IEnumerable cells = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, Azure.AI.ContentUnderstanding.DocumentCaption caption = null, System.Collections.Generic.IEnumerable footnotes = null, Azure.AI.ContentUnderstanding.SemanticRole? role = default(Azure.AI.ContentUnderstanding.SemanticRole?)) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentTableCell DocumentTableCell(Azure.AI.ContentUnderstanding.DocumentTableCellKind? kind = default(Azure.AI.ContentUnderstanding.DocumentTableCellKind?), int rowIndex = 0, int columnIndex = 0, int? rowSpan = default(int?), int? columnSpan = default(int?), string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentWord DocumentWord(string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, float? confidence = default(float?)) { throw null; } + public static Azure.AI.ContentUnderstanding.IntegerField IntegerField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, long? valueInteger = default(long?)) { throw null; } + public static Azure.AI.ContentUnderstanding.JsonField JsonField(System.Collections.Generic.IEnumerable spans = null, float? 
confidence = default(float?), string source = null, System.BinaryData valueJson = null) { throw null; } + public static Azure.AI.ContentUnderstanding.KnowledgeSource KnowledgeSource(string kind = null) { throw null; } + public static Azure.AI.ContentUnderstanding.LabeledDataKnowledgeSource LabeledDataKnowledgeSource(System.Uri containerUrl = null, string prefix = null, string fileListPath = null) { throw null; } + public static Azure.AI.ContentUnderstanding.MediaContent MediaContent(string kind = null, string mimeType = null, string analyzerId = null, string category = null, string path = null, string markdown = null, System.Collections.Generic.IDictionary fields = null) { throw null; } + public static Azure.AI.ContentUnderstanding.NumberField NumberField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, double? valueNumber = default(double?)) { throw null; } + public static Azure.AI.ContentUnderstanding.ObjectField ObjectField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, System.Collections.Generic.IDictionary valueObject = null) { throw null; } + public static Azure.AI.ContentUnderstanding.StringField StringField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, string valueString = null) { throw null; } + public static Azure.AI.ContentUnderstanding.SupportedModels SupportedModels(System.Collections.Generic.IEnumerable completion = null, System.Collections.Generic.IEnumerable embedding = null) { throw null; } + public static Azure.AI.ContentUnderstanding.TimeField TimeField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, System.TimeSpan? valueTime = default(System.TimeSpan?)) { throw null; } + public static Azure.AI.ContentUnderstanding.TranscriptPhrase TranscriptPhrase(string speaker = null, long startTimeMs = (long)0, long endTimeMs = (long)0, string locale = null, string text = null, float? 
confidence = default(float?), Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable words = null) { throw null; } + public static Azure.AI.ContentUnderstanding.TranscriptWord TranscriptWord(long startTimeMs = (long)0, long endTimeMs = (long)0, string text = null, Azure.AI.ContentUnderstanding.ContentSpan span = null) { throw null; } + } + public partial class CopyAuthorization : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal CopyAuthorization() { } + public System.DateTimeOffset ExpiresAt { get { throw null; } } + public string Source { get { throw null; } } + public string TargetAzureResourceId { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.CopyAuthorization JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + public static explicit operator Azure.AI.ContentUnderstanding.CopyAuthorization (Azure.Response response) { throw null; } + protected virtual Azure.AI.ContentUnderstanding.CopyAuthorization PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.CopyAuthorization System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.CopyAuthorization System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DateField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DateField() { } + public System.DateTimeOffset? 
ValueDate { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DateField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DateField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentAnnotation : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentAnnotation() { } + public string Author { get { throw null; } } + public System.Collections.Generic.IList Comments { get { throw null; } } + public System.DateTimeOffset? CreatedAt { get { throw null; } } + public string Id { get { throw null; } } + public Azure.AI.ContentUnderstanding.DocumentAnnotationKind Kind { get { throw null; } } + public System.DateTimeOffset? 
LastModifiedAt { get { throw null; } } + public string Source { get { throw null; } } + public System.Collections.Generic.IList Spans { get { throw null; } } + public System.Collections.Generic.IList Tags { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentAnnotation JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentAnnotation PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentAnnotation System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentAnnotation System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentAnnotationComment : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentAnnotationComment() { } + public string Author { get { throw null; } } + public System.DateTimeOffset? CreatedAt { get { throw null; } } + public System.DateTimeOffset? 
LastModifiedAt { get { throw null; } } + public string Message { get { throw null; } } + public System.Collections.Generic.IList Tags { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentAnnotationComment JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentAnnotationComment PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentAnnotationComment System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentAnnotationComment System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct DocumentAnnotationKind : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public DocumentAnnotationKind(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Bold { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Circle { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Highlight { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Italic { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Note { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Strikethrough { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Underline { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.DocumentAnnotationKind other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.DocumentAnnotationKind left, Azure.AI.ContentUnderstanding.DocumentAnnotationKind right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentAnnotationKind (string value) { throw null; } + public static 
implicit operator Azure.AI.ContentUnderstanding.DocumentAnnotationKind? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.DocumentAnnotationKind left, Azure.AI.ContentUnderstanding.DocumentAnnotationKind right) { throw null; } + public override string ToString() { throw null; } + } + public partial class DocumentBarcode : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentBarcode() { } + public float? Confidence { get { throw null; } } + public Azure.AI.ContentUnderstanding.DocumentBarcodeKind Kind { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public string Value { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentBarcode JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentBarcode PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentBarcode System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentBarcode System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct DocumentBarcodeKind : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public DocumentBarcodeKind(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind Aztec { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind Codabar { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind Code128 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind Code39 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind Code93 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind DataBar { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind DataBarExpanded { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind DataMatrix { get { throw null; } } + public static 
Azure.AI.ContentUnderstanding.DocumentBarcodeKind EAN13 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind EAN8 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind ITF { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind MaxiCode { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind MicroQRCode { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind PDF417 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind QRCode { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind UPCA { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind UPCE { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.DocumentBarcodeKind other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.DocumentBarcodeKind left, Azure.AI.ContentUnderstanding.DocumentBarcodeKind right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentBarcodeKind (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentBarcodeKind? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.DocumentBarcodeKind left, Azure.AI.ContentUnderstanding.DocumentBarcodeKind right) { throw null; } + public override string ToString() { throw null; } + } + public partial class DocumentCaption : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentCaption() { } + public string Content { get { throw null; } } + public System.Collections.Generic.IList Elements { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentCaption JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentCaption PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentCaption System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentCaption System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, 
System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentChartFigure : Azure.AI.ContentUnderstanding.DocumentFigure, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentChartFigure() { } + public System.Collections.Generic.IDictionary Content { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.DocumentFigure JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.DocumentFigure PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentChartFigure System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentChartFigure System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentContent : Azure.AI.ContentUnderstanding.MediaContent, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentContent() { } + public System.Collections.Generic.IList Annotations { get { throw null; } } + public int EndPageNumber { get { throw null; } } + public System.Collections.Generic.IList Figures { get { throw null; } } + public System.Collections.Generic.IList Hyperlinks { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentField this[string fieldName] { get { throw null; } } + public System.Collections.Generic.IList Pages { get { throw null; } } + public System.Collections.Generic.IList Paragraphs { get { throw null; } } + public System.Collections.Generic.IList Sections { get { throw null; } } + public System.Collections.Generic.IList Segments { get { throw null; } } + public int StartPageNumber { get { throw null; } } + public System.Collections.Generic.IList Tables { get { throw null; } } + public Azure.AI.ContentUnderstanding.LengthUnit? 
Unit { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.MediaContent JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.MediaContent PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentContentSegment : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentContentSegment() { } + public string Category { get { throw null; } } + public int EndPageNumber { get { throw null; } } + public string SegmentId { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public int StartPageNumber { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentContentSegment JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentContentSegment PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentContentSegment System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentContentSegment System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions 
options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public abstract partial class DocumentFigure : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentFigure() { } + public Azure.AI.ContentUnderstanding.DocumentCaption Caption { get { throw null; } } + public string Description { get { throw null; } } + public System.Collections.Generic.IList Elements { get { throw null; } } + public System.Collections.Generic.IList Footnotes { get { throw null; } } + public string Id { get { throw null; } } + public Azure.AI.ContentUnderstanding.SemanticRole? Role { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentFigure JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentFigure PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentFigure System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentFigure System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentFootnote : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentFootnote() { } + public string Content { get { throw null; } } + public System.Collections.Generic.IList Elements { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentFootnote JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentFootnote PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData 
PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentFootnote System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentFootnote System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentFormula : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentFormula() { } + public float? Confidence { get { throw null; } } + public Azure.AI.ContentUnderstanding.DocumentFormulaKind Kind { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public string Value { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentFormula JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentFormula PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentFormula System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentFormula System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct DocumentFormulaKind : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public DocumentFormulaKind(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentFormulaKind Display { get { throw null; } } + public static 
Azure.AI.ContentUnderstanding.DocumentFormulaKind Inline { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.DocumentFormulaKind other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.DocumentFormulaKind left, Azure.AI.ContentUnderstanding.DocumentFormulaKind right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentFormulaKind (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentFormulaKind? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.DocumentFormulaKind left, Azure.AI.ContentUnderstanding.DocumentFormulaKind right) { throw null; } + public override string ToString() { throw null; } + } + public partial class DocumentHyperlink : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentHyperlink() { } + public string Content { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public string Url { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentHyperlink JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentHyperlink PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentHyperlink System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentHyperlink System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentLine : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentLine() { } + public string Content { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentLine JsonModelCreateCore(ref 
System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentLine PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentLine System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentLine System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentMermaidFigure : Azure.AI.ContentUnderstanding.DocumentFigure, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentMermaidFigure() { } + public string Content { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.DocumentFigure JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.DocumentFigure PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentMermaidFigure System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentMermaidFigure System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentPage : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal 
DocumentPage() { } + public float? Angle { get { throw null; } } + public System.Collections.Generic.IList Barcodes { get { throw null; } } + public System.Collections.Generic.IList Formulas { get { throw null; } } + public float? Height { get { throw null; } } + public System.Collections.Generic.IList Lines { get { throw null; } } + public int PageNumber { get { throw null; } } + public System.Collections.Generic.IList Spans { get { throw null; } } + public float? Width { get { throw null; } } + public System.Collections.Generic.IList Words { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentPage JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentPage PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentPage System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentPage System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentParagraph : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentParagraph() { } + public string Content { get { throw null; } } + public Azure.AI.ContentUnderstanding.SemanticRole? 
Role { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentParagraph JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentParagraph PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentParagraph System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentParagraph System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentSection : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentSection() { } + public System.Collections.Generic.IList Elements { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentSection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentSection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentSection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentSection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + 
System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentTable : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentTable() { } + public Azure.AI.ContentUnderstanding.DocumentCaption Caption { get { throw null; } } + public System.Collections.Generic.IList Cells { get { throw null; } } + public int ColumnCount { get { throw null; } } + public System.Collections.Generic.IList Footnotes { get { throw null; } } + public Azure.AI.ContentUnderstanding.SemanticRole? Role { get { throw null; } } + public int RowCount { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentTable JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentTable PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentTable System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentTable System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentTableCell : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentTableCell() { } + public int ColumnIndex { get { throw null; } } + public int? ColumnSpan { get { throw null; } } + public string Content { get { throw null; } } + public System.Collections.Generic.IList Elements { get { throw null; } } + public Azure.AI.ContentUnderstanding.DocumentTableCellKind? Kind { get { throw null; } } + public int RowIndex { get { throw null; } } + public int? 
RowSpan { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentTableCell JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentTableCell PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentTableCell System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentTableCell System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct DocumentTableCellKind : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public DocumentTableCellKind(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentTableCellKind ColumnHeader { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentTableCellKind Content { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentTableCellKind Description { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentTableCellKind RowHeader { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentTableCellKind StubHead { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.DocumentTableCellKind other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.DocumentTableCellKind left, Azure.AI.ContentUnderstanding.DocumentTableCellKind right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentTableCellKind (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentTableCellKind? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.DocumentTableCellKind left, Azure.AI.ContentUnderstanding.DocumentTableCellKind right) { throw null; } + public override string ToString() { throw null; } + } + public partial class DocumentWord : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentWord() { } + public float? Confidence { get { throw null; } } + public string Content { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentWord JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentWord PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentWord System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentWord System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct GenerationMethod : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public GenerationMethod(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.GenerationMethod Classify { get { throw null; } } + public static Azure.AI.ContentUnderstanding.GenerationMethod Extract { get { throw null; } } + public static Azure.AI.ContentUnderstanding.GenerationMethod Generate { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.GenerationMethod other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.GenerationMethod left, Azure.AI.ContentUnderstanding.GenerationMethod right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.GenerationMethod (string value) { throw null; } + public static implicit operator 
Azure.AI.ContentUnderstanding.GenerationMethod? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.GenerationMethod left, Azure.AI.ContentUnderstanding.GenerationMethod right) { throw null; } + public override string ToString() { throw null; } + } + public partial class IntegerField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal IntegerField() { } + public long? ValueInteger { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.IntegerField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.IntegerField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class JsonField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal JsonField() { } + public System.BinaryData ValueJson { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.JsonField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.JsonField 
System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public abstract partial class KnowledgeSource : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal KnowledgeSource() { } + protected virtual Azure.AI.ContentUnderstanding.KnowledgeSource JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.KnowledgeSource PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.KnowledgeSource System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.KnowledgeSource System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class LabeledDataKnowledgeSource : Azure.AI.ContentUnderstanding.KnowledgeSource, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public LabeledDataKnowledgeSource(System.Uri containerUrl, string fileListPath) { } + public System.Uri ContainerUrl { get { throw null; } set { } } + public string FileListPath { get { throw null; } set { } } + public string Prefix { get { throw null; } set { } } + protected override Azure.AI.ContentUnderstanding.KnowledgeSource JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.KnowledgeSource PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.LabeledDataKnowledgeSource System.ClientModel.Primitives.IJsonModel.Create(ref 
System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.LabeledDataKnowledgeSource System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct LengthUnit : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public LengthUnit(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.LengthUnit Inch { get { throw null; } } + public static Azure.AI.ContentUnderstanding.LengthUnit Pixel { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.LengthUnit other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.LengthUnit left, Azure.AI.ContentUnderstanding.LengthUnit right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.LengthUnit (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.LengthUnit? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.LengthUnit left, Azure.AI.ContentUnderstanding.LengthUnit right) { throw null; } + public override string ToString() { throw null; } + } + public abstract partial class MediaContent : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal MediaContent() { } + public string AnalyzerId { get { throw null; } } + public string Category { get { throw null; } } + public System.Collections.Generic.IDictionary Fields { get { throw null; } } + public string Markdown { get { throw null; } } + public string MimeType { get { throw null; } } + public string Path { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.MediaContent JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.MediaContent PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.MediaContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.MediaContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class NumberField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal NumberField() { } + public double? 
ValueNumber { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.NumberField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.NumberField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class ObjectField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal ObjectField() { } + public Azure.AI.ContentUnderstanding.ContentField this[string fieldName] { get { throw null; } } + public System.Collections.Generic.IDictionary ValueObject { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ObjectField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ObjectField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData 
System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ProcessingLocation : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ProcessingLocation(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.ProcessingLocation DataZone { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ProcessingLocation Geography { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ProcessingLocation Global { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.ProcessingLocation other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.ProcessingLocation left, Azure.AI.ContentUnderstanding.ProcessingLocation right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ProcessingLocation (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ProcessingLocation? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.ProcessingLocation left, Azure.AI.ContentUnderstanding.ProcessingLocation right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct SemanticRole : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public SemanticRole(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.SemanticRole Footnote { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole FormulaBlock { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole PageFooter { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole PageHeader { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole PageNumber { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole SectionHeading { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole Title { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.SemanticRole other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.SemanticRole left, Azure.AI.ContentUnderstanding.SemanticRole right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.SemanticRole (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.SemanticRole? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.SemanticRole left, Azure.AI.ContentUnderstanding.SemanticRole right) { throw null; } + public override string ToString() { throw null; } + } + public partial class StringField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal StringField() { } + public string ValueString { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.StringField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.StringField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class SupportedModels : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal SupportedModels() { } + public System.Collections.Generic.IList Completion { get { throw null; } } + public System.Collections.Generic.IList Embedding { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.SupportedModels JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.SupportedModels PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.SupportedModels System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.SupportedModels 
System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct TableFormat : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public TableFormat(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.TableFormat Html { get { throw null; } } + public static Azure.AI.ContentUnderstanding.TableFormat Markdown { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.TableFormat other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.TableFormat left, Azure.AI.ContentUnderstanding.TableFormat right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.TableFormat (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.TableFormat? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.TableFormat left, Azure.AI.ContentUnderstanding.TableFormat right) { throw null; } + public override string ToString() { throw null; } + } + public partial class TimeField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal TimeField() { } + public System.TimeSpan? 
ValueTime { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.TimeField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.TimeField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class TranscriptPhrase : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal TranscriptPhrase() { } + public float? 
Confidence { get { throw null; } } + public long EndTimeMs { get { throw null; } } + public string Locale { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public string Speaker { get { throw null; } } + public long StartTimeMs { get { throw null; } } + public string Text { get { throw null; } } + public System.Collections.Generic.IList Words { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.TranscriptPhrase JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.TranscriptPhrase PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.TranscriptPhrase System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.TranscriptPhrase System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class TranscriptWord : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal TranscriptWord() { } + public long EndTimeMs { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public long StartTimeMs { get { throw null; } } + public string Text { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.TranscriptWord JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.TranscriptWord PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.TranscriptWord System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.TranscriptWord 
System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } +} +namespace Microsoft.Extensions.Azure +{ + public static partial class ContentUnderstandingClientBuilderExtensions + { + public static Azure.Core.Extensions.IAzureClientBuilder AddContentUnderstandingClient(this TBuilder builder, System.Uri endpoint) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithCredential { throw null; } + public static Azure.Core.Extensions.IAzureClientBuilder AddContentUnderstandingClient(this TBuilder builder, System.Uri endpoint, Azure.AzureKeyCredential credential) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilder { throw null; } + [System.Diagnostics.CodeAnalysis.RequiresDynamicCodeAttribute("Requires unreferenced code until we opt into EnableConfigurationBindingGenerator.")] + public static Azure.Core.Extensions.IAzureClientBuilder AddContentUnderstandingClient(this TBuilder builder, TConfiguration configuration) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithConfiguration { throw null; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/api/Azure.AI.ContentUnderstanding.netstandard2.0.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/api/Azure.AI.ContentUnderstanding.netstandard2.0.cs new file mode 100644 index 000000000000..82c076a72e97 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/api/Azure.AI.ContentUnderstanding.netstandard2.0.cs @@ -0,0 +1,1275 @@ +namespace Azure.AI.ContentUnderstanding +{ + public partial class AnalyzeInput : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public AnalyzeInput() { } + public System.BinaryData Data { get { throw null; } set { } } + public string InputRange { get { throw null; } set { } } + public string MimeType { get { throw null; } set { } } + public string Name { get { throw null; } set { } } + public System.Uri Url { get { throw null; } set { } } + protected virtual Azure.AI.ContentUnderstanding.AnalyzeInput JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.AnalyzeInput PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.AnalyzeInput System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.AnalyzeInput 
System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AnalyzeResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal AnalyzeResult() { } + public string AnalyzerId { get { throw null; } } + public string ApiVersion { get { throw null; } } + public System.Collections.Generic.IList Contents { get { throw null; } } + public System.DateTimeOffset? CreatedAt { get { throw null; } } + public string StringEncoding { get { throw null; } } + public System.Collections.Generic.IList Warnings { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.AnalyzeResult JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.AnalyzeResult PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.AnalyzeResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.AnalyzeResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AnalyzeResultOperation : Azure.Operation + { + protected AnalyzeResultOperation() { } + public override bool HasCompleted { get { throw null; } } + public override bool HasValue { get { throw null; } } + public override string Id { get { throw null; } } + public override Azure.AI.ContentUnderstanding.AnalyzeResult Value { get { throw null; } } + public override Azure.Response GetRawResponse() { throw null; } + public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public override System.Threading.Tasks.ValueTask UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public override Azure.Response WaitForCompletion(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw 
null; } + public override Azure.Response WaitForCompletion(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public override System.Threading.Tasks.ValueTask> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public override System.Threading.Tasks.ValueTask> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct AnnotationFormat : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public AnnotationFormat(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.AnnotationFormat Markdown { get { throw null; } } + public static Azure.AI.ContentUnderstanding.AnnotationFormat None { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.AnnotationFormat other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.AnnotationFormat left, Azure.AI.ContentUnderstanding.AnnotationFormat right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.AnnotationFormat (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.AnnotationFormat? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.AnnotationFormat left, Azure.AI.ContentUnderstanding.AnnotationFormat right) { throw null; } + public override string ToString() { throw null; } + } + public partial class ArrayField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal ArrayField() { } + public int Count { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentField this[int index] { get { throw null; } } + public System.Collections.Generic.IList ValueArray { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ArrayField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ArrayField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AudioVisualContent : Azure.AI.ContentUnderstanding.MediaContent, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal AudioVisualContent() { } + public System.Collections.Generic.IList CameraShotTimesMs { get { throw null; } } + public long EndTimeMs { get { throw null; } } + public int? Height { get { throw null; } } + public System.Collections.Generic.IList KeyFrameTimesMs { get { throw null; } } + public System.Collections.Generic.IList Segments { get { throw null; } } + public long StartTimeMs { get { throw null; } } + public System.Collections.Generic.IList TranscriptPhrases { get { throw null; } } + public int? 
Width { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.MediaContent JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.MediaContent PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.AudioVisualContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.AudioVisualContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AudioVisualContentSegment : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal AudioVisualContentSegment() { } + public string Category { get { throw null; } } + public long EndTimeMs { get { throw null; } } + public string SegmentId { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public long StartTimeMs { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.AudioVisualContentSegment JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.AudioVisualContentSegment PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.AudioVisualContentSegment System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.AudioVisualContentSegment System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string 
System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class AzureAIContentUnderstandingContext : System.ClientModel.Primitives.ModelReaderWriterContext + { + internal AzureAIContentUnderstandingContext() { } + public static Azure.AI.ContentUnderstanding.AzureAIContentUnderstandingContext Default { get { throw null; } } + protected override bool TryGetTypeBuilderCore(System.Type type, out System.ClientModel.Primitives.ModelReaderWriterTypeBuilder builder) { throw null; } + } + public partial class BooleanField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal BooleanField() { } + public bool? ValueBoolean { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.BooleanField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.BooleanField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ChartFormat : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ChartFormat(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.ChartFormat ChartJs { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ChartFormat Markdown { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.ChartFormat other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator 
==(Azure.AI.ContentUnderstanding.ChartFormat left, Azure.AI.ContentUnderstanding.ChartFormat right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ChartFormat (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ChartFormat? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.ChartFormat left, Azure.AI.ContentUnderstanding.ChartFormat right) { throw null; } + public override string ToString() { throw null; } + } + public partial class ContentAnalyzer : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ContentAnalyzer() { } + public string AnalyzerId { get { throw null; } } + public string BaseAnalyzerId { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.ContentAnalyzerConfig Config { get { throw null; } set { } } + public System.DateTimeOffset CreatedAt { get { throw null; } } + public string Description { get { throw null; } set { } } + public bool? DynamicFieldSchema { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.ContentFieldSchema FieldSchema { get { throw null; } set { } } + public System.Collections.Generic.IList KnowledgeSources { get { throw null; } } + public System.DateTimeOffset LastModifiedAt { get { throw null; } } + public System.Collections.Generic.IDictionary Models { get { throw null; } } + public Azure.AI.ContentUnderstanding.ProcessingLocation? ProcessingLocation { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.ContentAnalyzerStatus Status { get { throw null; } } + public Azure.AI.ContentUnderstanding.SupportedModels SupportedModels { get { throw null; } } + public System.Collections.Generic.IDictionary Tags { get { throw null; } } + public System.Collections.Generic.IReadOnlyList Warnings { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.ContentAnalyzer JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + public static explicit operator Azure.AI.ContentUnderstanding.ContentAnalyzer (Azure.Response response) { throw null; } + public static implicit operator Azure.Core.RequestContent (Azure.AI.ContentUnderstanding.ContentAnalyzer contentAnalyzer) { throw null; } + protected virtual Azure.AI.ContentUnderstanding.ContentAnalyzer PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentAnalyzer System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentAnalyzer System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string 
System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class ContentAnalyzerConfig : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ContentAnalyzerConfig() { } + public Azure.AI.ContentUnderstanding.AnnotationFormat? AnnotationFormat { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.ChartFormat? ChartFormat { get { throw null; } set { } } + public System.Collections.Generic.IDictionary ContentCategories { get { throw null; } } + public bool? DisableFaceBlurring { get { throw null; } set { } } + public bool? EnableFigureAnalysis { get { throw null; } set { } } + public bool? EnableFigureDescription { get { throw null; } set { } } + public bool? EnableFormula { get { throw null; } set { } } + public bool? EnableLayout { get { throw null; } set { } } + public bool? EnableOcr { get { throw null; } set { } } + public bool? EnableSegment { get { throw null; } set { } } + public bool? EstimateFieldSourceAndConfidence { get { throw null; } set { } } + public System.Collections.Generic.IList Locales { get { throw null; } } + public bool? OmitContent { get { throw null; } set { } } + public bool? ReturnDetails { get { throw null; } set { } } + public bool? SegmentPerPage { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.TableFormat? TableFormat { get { throw null; } set { } } + protected virtual Azure.AI.ContentUnderstanding.ContentAnalyzerConfig JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.ContentAnalyzerConfig PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentAnalyzerConfig System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentAnalyzerConfig System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ContentAnalyzerStatus : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + 
public ContentAnalyzerStatus(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentAnalyzerStatus Creating { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentAnalyzerStatus Deleting { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentAnalyzerStatus Failed { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentAnalyzerStatus Ready { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.ContentAnalyzerStatus other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.ContentAnalyzerStatus left, Azure.AI.ContentUnderstanding.ContentAnalyzerStatus right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ContentAnalyzerStatus (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ContentAnalyzerStatus? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.ContentAnalyzerStatus left, Azure.AI.ContentUnderstanding.ContentAnalyzerStatus right) { throw null; } + public override string ToString() { throw null; } + } + public partial class ContentCategory : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ContentCategory() { } + public Azure.AI.ContentUnderstanding.ContentAnalyzer Analyzer { get { throw null; } set { } } + public string AnalyzerId { get { throw null; } set { } } + public string Description { get { throw null; } set { } } + protected virtual Azure.AI.ContentUnderstanding.ContentCategory JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.ContentCategory PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentCategory System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentCategory System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public abstract partial class ContentField : 
System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal ContentField() { } + public float? Confidence { get { throw null; } } + public string Source { get { throw null; } } + public System.Collections.Generic.IList Spans { get { throw null; } } + public object Value { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class ContentFieldDefinition : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ContentFieldDefinition() { } + public string Description { get { throw null; } set { } } + public System.Collections.Generic.IList Enum { get { throw null; } } + public System.Collections.Generic.IDictionary EnumDescriptions { get { throw null; } } + public bool? EstimateSourceAndConfidence { get { throw null; } set { } } + public System.Collections.Generic.IList Examples { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentFieldDefinition ItemDefinition { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.GenerationMethod? Method { get { throw null; } set { } } + public System.Collections.Generic.IDictionary Properties { get { throw null; } } + public string Ref { get { throw null; } set { } } + public Azure.AI.ContentUnderstanding.ContentFieldType? 
Type { get { throw null; } set { } } + protected virtual Azure.AI.ContentUnderstanding.ContentFieldDefinition JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.ContentFieldDefinition PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentFieldDefinition System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentFieldDefinition System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class ContentFieldSchema : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ContentFieldSchema(System.Collections.Generic.IDictionary fields) { } + public System.Collections.Generic.IDictionary Definitions { get { throw null; } } + public string Description { get { throw null; } set { } } + public System.Collections.Generic.IDictionary Fields { get { throw null; } } + public string Name { get { throw null; } set { } } + protected virtual Azure.AI.ContentUnderstanding.ContentFieldSchema JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.ContentFieldSchema PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentFieldSchema System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentFieldSchema System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string 
System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ContentFieldType : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ContentFieldType(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentFieldType Array { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Boolean { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Date { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Integer { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Json { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Number { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Object { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType String { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ContentFieldType Time { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.ContentFieldType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.ContentFieldType left, Azure.AI.ContentUnderstanding.ContentFieldType right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ContentFieldType (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ContentFieldType? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.ContentFieldType left, Azure.AI.ContentUnderstanding.ContentFieldType right) { throw null; } + public override string ToString() { throw null; } + } + public partial class ContentSpan : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal ContentSpan() { } + public int Length { get { throw null; } } + public int Offset { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.ContentSpan JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.ContentSpan PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentSpan System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentSpan System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class ContentUnderstandingClient + { + protected ContentUnderstandingClient() { } + public ContentUnderstandingClient(System.Uri endpoint, Azure.AzureKeyCredential credential) { } + public ContentUnderstandingClient(System.Uri endpoint, Azure.AzureKeyCredential credential, Azure.AI.ContentUnderstanding.ContentUnderstandingClientOptions options) { } + public ContentUnderstandingClient(System.Uri endpoint, Azure.Core.TokenCredential credential) { } + public ContentUnderstandingClient(System.Uri endpoint, Azure.Core.TokenCredential credential, Azure.AI.ContentUnderstanding.ContentUnderstandingClientOptions options) { } + public virtual Azure.Core.Pipeline.HttpPipeline Pipeline { get { throw null; } } + public virtual Azure.Operation Analyze(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, string stringEncoding = null, string processingLocation = null, Azure.RequestContext context = null) { throw null; } + public virtual Azure.AI.ContentUnderstanding.AnalyzeResultOperation Analyze(Azure.WaitUntil waitUntil, string analyzerId, System.Collections.Generic.IEnumerable? inputs = null, System.Collections.Generic.IDictionary? modelDeployments = null, Azure.AI.ContentUnderstanding.ProcessingLocation? 
processingLocation = default(Azure.AI.ContentUnderstanding.ProcessingLocation?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> AnalyzeAsync(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, string stringEncoding = null, string processingLocation = null, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task AnalyzeAsync(Azure.WaitUntil waitUntil, string analyzerId, System.Collections.Generic.IEnumerable? inputs = null, System.Collections.Generic.IDictionary? modelDeployments = null, Azure.AI.ContentUnderstanding.ProcessingLocation? processingLocation = default(Azure.AI.ContentUnderstanding.ProcessingLocation?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Operation AnalyzeBinary(Azure.WaitUntil waitUntil, string analyzerId, string contentType, Azure.Core.RequestContent content, string stringEncoding = null, string processingLocation = null, string inputRange = null, Azure.RequestContext context = null) { throw null; } + public virtual Azure.AI.ContentUnderstanding.AnalyzeResultOperation AnalyzeBinary(Azure.WaitUntil waitUntil, string analyzerId, string contentType, System.BinaryData binaryInput, string? stringEncoding = null, Azure.AI.ContentUnderstanding.ProcessingLocation? processingLocation = default(Azure.AI.ContentUnderstanding.ProcessingLocation?), string? inputRange = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> AnalyzeBinaryAsync(Azure.WaitUntil waitUntil, string analyzerId, string contentType, Azure.Core.RequestContent content, string stringEncoding = null, string processingLocation = null, string inputRange = null, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task AnalyzeBinaryAsync(Azure.WaitUntil waitUntil, string analyzerId, string contentType, System.BinaryData binaryInput, string? stringEncoding = null, Azure.AI.ContentUnderstanding.ProcessingLocation? processingLocation = default(Azure.AI.ContentUnderstanding.ProcessingLocation?), string? inputRange = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Operation CopyAnalyzer(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, bool? allowReplace = default(bool?), Azure.RequestContext context = null) { throw null; } + public virtual Azure.Operation CopyAnalyzer(Azure.WaitUntil waitUntil, string analyzerId, string sourceAnalyzerId, string sourceAzureResourceId = null, string sourceRegion = null, bool? allowReplace = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> CopyAnalyzerAsync(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, bool? allowReplace = default(bool?), Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task> CopyAnalyzerAsync(Azure.WaitUntil waitUntil, string analyzerId, string sourceAnalyzerId, string sourceAzureResourceId = null, string sourceRegion = null, bool? 
allowReplace = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Operation CreateAnalyzer(Azure.WaitUntil waitUntil, string analyzerId, Azure.AI.ContentUnderstanding.ContentAnalyzer resource, bool? allowReplace = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Operation CreateAnalyzer(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, bool? allowReplace = default(bool?), Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task> CreateAnalyzerAsync(Azure.WaitUntil waitUntil, string analyzerId, Azure.AI.ContentUnderstanding.ContentAnalyzer resource, bool? allowReplace = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> CreateAnalyzerAsync(Azure.WaitUntil waitUntil, string analyzerId, Azure.Core.RequestContent content, bool? allowReplace = default(bool?), Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response DeleteAnalyzer(string analyzerId, Azure.RequestContext context) { throw null; } + public virtual Azure.Response DeleteAnalyzer(string analyzerId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task DeleteAnalyzerAsync(string analyzerId, Azure.RequestContext context) { throw null; } + public virtual System.Threading.Tasks.Task DeleteAnalyzerAsync(string analyzerId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response DeleteResult(string operationId, Azure.RequestContext context) { throw null; } + public virtual Azure.Response DeleteResult(string operationId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task DeleteResultAsync(string operationId, Azure.RequestContext context) { throw null; } + public virtual System.Threading.Tasks.Task DeleteResultAsync(string operationId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetAnalyzer(string analyzerId, Azure.RequestContext context) { throw null; } + public virtual Azure.Response GetAnalyzer(string analyzerId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task GetAnalyzerAsync(string analyzerId, Azure.RequestContext context) { throw null; } + public virtual System.Threading.Tasks.Task> GetAnalyzerAsync(string analyzerId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Pageable GetAnalyzers(Azure.RequestContext context) { throw null; } + public virtual Azure.Pageable GetAnalyzers(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.AsyncPageable GetAnalyzersAsync(Azure.RequestContext context) { throw null; } + public virtual Azure.AsyncPageable GetAnalyzersAsync(System.Threading.CancellationToken cancellationToken = 
default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetDefaults(Azure.RequestContext context) { throw null; } + public virtual Azure.Response GetDefaults(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task GetDefaultsAsync(Azure.RequestContext context) { throw null; } + public virtual System.Threading.Tasks.Task> GetDefaultsAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetResultFile(string operationId, string path, Azure.RequestContext context) { throw null; } + public virtual Azure.Response GetResultFile(string operationId, string path, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task GetResultFileAsync(string operationId, string path, Azure.RequestContext context) { throw null; } + public virtual System.Threading.Tasks.Task> GetResultFileAsync(string operationId, string path, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GrantCopyAuthorization(string analyzerId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response GrantCopyAuthorization(string analyzerId, string targetAzureResourceId, string targetRegion = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task GrantCopyAuthorizationAsync(string analyzerId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task> GrantCopyAuthorizationAsync(string analyzerId, string targetAzureResourceId, string targetRegion = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response UpdateAnalyzer(string analyzerId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task UpdateAnalyzerAsync(string analyzerId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response UpdateDefaults(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task UpdateDefaultsAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + } + public static partial class ContentUnderstandingClientExtensions + { + public static Azure.Response UpdateAnalyzer(this Azure.AI.ContentUnderstanding.ContentUnderstandingClient client, string analyzerId, Azure.AI.ContentUnderstanding.ContentAnalyzer resource, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task UpdateAnalyzerAsync(this Azure.AI.ContentUnderstanding.ContentUnderstandingClient client, string analyzerId, Azure.AI.ContentUnderstanding.ContentAnalyzer resource, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static Azure.Response UpdateDefaults(this 
Azure.AI.ContentUnderstanding.ContentUnderstandingClient client, System.Collections.Generic.IDictionary modelDeployments, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task> UpdateDefaultsAsync(this Azure.AI.ContentUnderstanding.ContentUnderstandingClient client, System.Collections.Generic.IDictionary modelDeployments, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + } + public partial class ContentUnderstandingClientOptions : Azure.Core.ClientOptions + { + public ContentUnderstandingClientOptions(Azure.AI.ContentUnderstanding.ContentUnderstandingClientOptions.ServiceVersion version = Azure.AI.ContentUnderstanding.ContentUnderstandingClientOptions.ServiceVersion.V2025_11_01) { } + public enum ServiceVersion + { + V2025_11_01 = 1, + } + } + public partial class ContentUnderstandingDefaults : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal ContentUnderstandingDefaults() { } + public System.Collections.Generic.IDictionary ModelDeployments { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + public static explicit operator Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults (Azure.Response response) { throw null; } + protected virtual Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public static partial class ContentUnderstandingModelFactory + { + public static Azure.AI.ContentUnderstanding.AnalyzeInput AnalyzeInput(System.Uri url = null, System.BinaryData data = null, string name = null, string mimeType = null, string inputRange = null) { throw null; } + public static Azure.AI.ContentUnderstanding.AnalyzeResult AnalyzeResult(string analyzerId = null, string apiVersion = null, System.DateTimeOffset? 
createdAt = default(System.DateTimeOffset?), System.Collections.Generic.IEnumerable warnings = null, string stringEncoding = null, System.Collections.Generic.IEnumerable contents = null) { throw null; } + public static Azure.AI.ContentUnderstanding.ArrayField ArrayField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, System.Collections.Generic.IEnumerable valueArray = null) { throw null; } + public static Azure.AI.ContentUnderstanding.AudioVisualContent AudioVisualContent(string mimeType = null, string analyzerId = null, string category = null, string path = null, string markdown = null, System.Collections.Generic.IDictionary fields = null, long startTimeMs = (long)0, long endTimeMs = (long)0, int? width = default(int?), int? height = default(int?), System.Collections.Generic.IEnumerable cameraShotTimesMs = null, System.Collections.Generic.IEnumerable keyFrameTimesMs = null, System.Collections.Generic.IEnumerable transcriptPhrases = null, System.Collections.Generic.IEnumerable segments = null) { throw null; } + public static Azure.AI.ContentUnderstanding.AudioVisualContentSegment AudioVisualContentSegment(string segmentId = null, string category = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, long startTimeMs = (long)0, long endTimeMs = (long)0) { throw null; } + public static Azure.AI.ContentUnderstanding.BooleanField BooleanField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, bool? valueBoolean = default(bool?)) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentAnalyzer ContentAnalyzer(string analyzerId = null, string description = null, System.Collections.Generic.IDictionary tags = null, Azure.AI.ContentUnderstanding.ContentAnalyzerStatus status = default(Azure.AI.ContentUnderstanding.ContentAnalyzerStatus), System.DateTimeOffset createdAt = default(System.DateTimeOffset), System.DateTimeOffset lastModifiedAt = default(System.DateTimeOffset), System.Collections.Generic.IEnumerable warnings = null, string baseAnalyzerId = null, Azure.AI.ContentUnderstanding.ContentAnalyzerConfig config = null, Azure.AI.ContentUnderstanding.ContentFieldSchema fieldSchema = null, bool? dynamicFieldSchema = default(bool?), Azure.AI.ContentUnderstanding.ProcessingLocation? processingLocation = default(Azure.AI.ContentUnderstanding.ProcessingLocation?), System.Collections.Generic.IEnumerable knowledgeSources = null, System.Collections.Generic.IDictionary models = null, Azure.AI.ContentUnderstanding.SupportedModels supportedModels = null) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentAnalyzerConfig ContentAnalyzerConfig(bool? returnDetails = default(bool?), System.Collections.Generic.IEnumerable locales = null, bool? enableOcr = default(bool?), bool? enableLayout = default(bool?), bool? enableFigureDescription = default(bool?), bool? enableFigureAnalysis = default(bool?), bool? enableFormula = default(bool?), Azure.AI.ContentUnderstanding.TableFormat? tableFormat = default(Azure.AI.ContentUnderstanding.TableFormat?), Azure.AI.ContentUnderstanding.ChartFormat? chartFormat = default(Azure.AI.ContentUnderstanding.ChartFormat?), Azure.AI.ContentUnderstanding.AnnotationFormat? annotationFormat = default(Azure.AI.ContentUnderstanding.AnnotationFormat?), bool? disableFaceBlurring = default(bool?), bool? estimateFieldSourceAndConfidence = default(bool?), System.Collections.Generic.IDictionary contentCategories = null, bool? 
enableSegment = default(bool?), bool? segmentPerPage = default(bool?), bool? omitContent = default(bool?)) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentCategory ContentCategory(string description = null, string analyzerId = null, Azure.AI.ContentUnderstanding.ContentAnalyzer analyzer = null) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentField ContentField(string type = null, System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentFieldDefinition ContentFieldDefinition(Azure.AI.ContentUnderstanding.GenerationMethod? method = default(Azure.AI.ContentUnderstanding.GenerationMethod?), Azure.AI.ContentUnderstanding.ContentFieldType? type = default(Azure.AI.ContentUnderstanding.ContentFieldType?), string description = null, Azure.AI.ContentUnderstanding.ContentFieldDefinition itemDefinition = null, System.Collections.Generic.IDictionary properties = null, System.Collections.Generic.IEnumerable examples = null, System.Collections.Generic.IEnumerable @enum = null, System.Collections.Generic.IDictionary enumDescriptions = null, string @ref = null, bool? estimateSourceAndConfidence = default(bool?)) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentFieldSchema ContentFieldSchema(string name = null, string description = null, System.Collections.Generic.IDictionary fields = null, System.Collections.Generic.IDictionary definitions = null) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentSpan ContentSpan(int offset = 0, int length = 0) { throw null; } + public static Azure.AI.ContentUnderstanding.ContentUnderstandingDefaults ContentUnderstandingDefaults(System.Collections.Generic.IDictionary modelDeployments = null) { throw null; } + public static Azure.AI.ContentUnderstanding.CopyAuthorization CopyAuthorization(string source = null, string targetAzureResourceId = null, System.DateTimeOffset expiresAt = default(System.DateTimeOffset)) { throw null; } + public static Azure.AI.ContentUnderstanding.DateField DateField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, System.DateTimeOffset? valueDate = default(System.DateTimeOffset?)) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentAnnotation DocumentAnnotation(string id = null, Azure.AI.ContentUnderstanding.DocumentAnnotationKind kind = default(Azure.AI.ContentUnderstanding.DocumentAnnotationKind), System.Collections.Generic.IEnumerable spans = null, string source = null, System.Collections.Generic.IEnumerable comments = null, string author = null, System.DateTimeOffset? createdAt = default(System.DateTimeOffset?), System.DateTimeOffset? lastModifiedAt = default(System.DateTimeOffset?), System.Collections.Generic.IEnumerable tags = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationComment DocumentAnnotationComment(string message = null, string author = null, System.DateTimeOffset? createdAt = default(System.DateTimeOffset?), System.DateTimeOffset? 
lastModifiedAt = default(System.DateTimeOffset?), System.Collections.Generic.IEnumerable tags = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentBarcode DocumentBarcode(Azure.AI.ContentUnderstanding.DocumentBarcodeKind kind = default(Azure.AI.ContentUnderstanding.DocumentBarcodeKind), string value = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, float? confidence = default(float?)) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentCaption DocumentCaption(string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentChartFigure DocumentChartFigure(string id = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null, Azure.AI.ContentUnderstanding.DocumentCaption caption = null, System.Collections.Generic.IEnumerable footnotes = null, string description = null, Azure.AI.ContentUnderstanding.SemanticRole? role = default(Azure.AI.ContentUnderstanding.SemanticRole?), System.Collections.Generic.IDictionary content = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentContent DocumentContent(string mimeType = null, string analyzerId = null, string category = null, string path = null, string markdown = null, System.Collections.Generic.IDictionary fields = null, int startPageNumber = 0, int endPageNumber = 0, Azure.AI.ContentUnderstanding.LengthUnit? unit = default(Azure.AI.ContentUnderstanding.LengthUnit?), System.Collections.Generic.IEnumerable pages = null, System.Collections.Generic.IEnumerable paragraphs = null, System.Collections.Generic.IEnumerable sections = null, System.Collections.Generic.IEnumerable tables = null, System.Collections.Generic.IEnumerable figures = null, System.Collections.Generic.IEnumerable annotations = null, System.Collections.Generic.IEnumerable hyperlinks = null, System.Collections.Generic.IEnumerable segments = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentContentSegment DocumentContentSegment(string segmentId = null, string category = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, int startPageNumber = 0, int endPageNumber = 0) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentFigure DocumentFigure(string kind = null, string id = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null, Azure.AI.ContentUnderstanding.DocumentCaption caption = null, System.Collections.Generic.IEnumerable footnotes = null, string description = null, Azure.AI.ContentUnderstanding.SemanticRole? role = default(Azure.AI.ContentUnderstanding.SemanticRole?)) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentFootnote DocumentFootnote(string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentFormula DocumentFormula(Azure.AI.ContentUnderstanding.DocumentFormulaKind kind = default(Azure.AI.ContentUnderstanding.DocumentFormulaKind), string value = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, float? 
confidence = default(float?)) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentHyperlink DocumentHyperlink(string content = null, string url = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, string source = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentLine DocumentLine(string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentMermaidFigure DocumentMermaidFigure(string id = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null, Azure.AI.ContentUnderstanding.DocumentCaption caption = null, System.Collections.Generic.IEnumerable footnotes = null, string description = null, Azure.AI.ContentUnderstanding.SemanticRole? role = default(Azure.AI.ContentUnderstanding.SemanticRole?), string content = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentPage DocumentPage(int pageNumber = 0, float? width = default(float?), float? height = default(float?), System.Collections.Generic.IEnumerable spans = null, float? angle = default(float?), System.Collections.Generic.IEnumerable words = null, System.Collections.Generic.IEnumerable lines = null, System.Collections.Generic.IEnumerable barcodes = null, System.Collections.Generic.IEnumerable formulas = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentParagraph DocumentParagraph(Azure.AI.ContentUnderstanding.SemanticRole? role = default(Azure.AI.ContentUnderstanding.SemanticRole?), string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentSection DocumentSection(Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentTable DocumentTable(int rowCount = 0, int columnCount = 0, System.Collections.Generic.IEnumerable cells = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, Azure.AI.ContentUnderstanding.DocumentCaption caption = null, System.Collections.Generic.IEnumerable footnotes = null, Azure.AI.ContentUnderstanding.SemanticRole? role = default(Azure.AI.ContentUnderstanding.SemanticRole?)) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentTableCell DocumentTableCell(Azure.AI.ContentUnderstanding.DocumentTableCellKind? kind = default(Azure.AI.ContentUnderstanding.DocumentTableCellKind?), int rowIndex = 0, int columnIndex = 0, int? rowSpan = default(int?), int? columnSpan = default(int?), string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable elements = null) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentWord DocumentWord(string content = null, string source = null, Azure.AI.ContentUnderstanding.ContentSpan span = null, float? confidence = default(float?)) { throw null; } + public static Azure.AI.ContentUnderstanding.IntegerField IntegerField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, long? valueInteger = default(long?)) { throw null; } + public static Azure.AI.ContentUnderstanding.JsonField JsonField(System.Collections.Generic.IEnumerable spans = null, float? 
confidence = default(float?), string source = null, System.BinaryData valueJson = null) { throw null; } + public static Azure.AI.ContentUnderstanding.KnowledgeSource KnowledgeSource(string kind = null) { throw null; } + public static Azure.AI.ContentUnderstanding.LabeledDataKnowledgeSource LabeledDataKnowledgeSource(System.Uri containerUrl = null, string prefix = null, string fileListPath = null) { throw null; } + public static Azure.AI.ContentUnderstanding.MediaContent MediaContent(string kind = null, string mimeType = null, string analyzerId = null, string category = null, string path = null, string markdown = null, System.Collections.Generic.IDictionary fields = null) { throw null; } + public static Azure.AI.ContentUnderstanding.NumberField NumberField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, double? valueNumber = default(double?)) { throw null; } + public static Azure.AI.ContentUnderstanding.ObjectField ObjectField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, System.Collections.Generic.IDictionary valueObject = null) { throw null; } + public static Azure.AI.ContentUnderstanding.StringField StringField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, string valueString = null) { throw null; } + public static Azure.AI.ContentUnderstanding.SupportedModels SupportedModels(System.Collections.Generic.IEnumerable completion = null, System.Collections.Generic.IEnumerable embedding = null) { throw null; } + public static Azure.AI.ContentUnderstanding.TimeField TimeField(System.Collections.Generic.IEnumerable spans = null, float? confidence = default(float?), string source = null, System.TimeSpan? valueTime = default(System.TimeSpan?)) { throw null; } + public static Azure.AI.ContentUnderstanding.TranscriptPhrase TranscriptPhrase(string speaker = null, long startTimeMs = (long)0, long endTimeMs = (long)0, string locale = null, string text = null, float? 
confidence = default(float?), Azure.AI.ContentUnderstanding.ContentSpan span = null, System.Collections.Generic.IEnumerable words = null) { throw null; } + public static Azure.AI.ContentUnderstanding.TranscriptWord TranscriptWord(long startTimeMs = (long)0, long endTimeMs = (long)0, string text = null, Azure.AI.ContentUnderstanding.ContentSpan span = null) { throw null; } + } + public partial class CopyAuthorization : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal CopyAuthorization() { } + public System.DateTimeOffset ExpiresAt { get { throw null; } } + public string Source { get { throw null; } } + public string TargetAzureResourceId { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.CopyAuthorization JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + public static explicit operator Azure.AI.ContentUnderstanding.CopyAuthorization (Azure.Response response) { throw null; } + protected virtual Azure.AI.ContentUnderstanding.CopyAuthorization PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.CopyAuthorization System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.CopyAuthorization System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DateField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DateField() { } + public System.DateTimeOffset? 
ValueDate { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DateField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DateField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentAnnotation : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentAnnotation() { } + public string Author { get { throw null; } } + public System.Collections.Generic.IList Comments { get { throw null; } } + public System.DateTimeOffset? CreatedAt { get { throw null; } } + public string Id { get { throw null; } } + public Azure.AI.ContentUnderstanding.DocumentAnnotationKind Kind { get { throw null; } } + public System.DateTimeOffset? 
LastModifiedAt { get { throw null; } } + public string Source { get { throw null; } } + public System.Collections.Generic.IList Spans { get { throw null; } } + public System.Collections.Generic.IList Tags { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentAnnotation JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentAnnotation PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentAnnotation System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentAnnotation System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentAnnotationComment : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentAnnotationComment() { } + public string Author { get { throw null; } } + public System.DateTimeOffset? CreatedAt { get { throw null; } } + public System.DateTimeOffset? 
LastModifiedAt { get { throw null; } } + public string Message { get { throw null; } } + public System.Collections.Generic.IList Tags { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentAnnotationComment JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentAnnotationComment PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentAnnotationComment System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentAnnotationComment System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct DocumentAnnotationKind : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public DocumentAnnotationKind(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Bold { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Circle { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Highlight { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Italic { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Note { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Strikethrough { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentAnnotationKind Underline { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.DocumentAnnotationKind other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.DocumentAnnotationKind left, Azure.AI.ContentUnderstanding.DocumentAnnotationKind right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentAnnotationKind (string value) { throw null; } + public static 
implicit operator Azure.AI.ContentUnderstanding.DocumentAnnotationKind? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.DocumentAnnotationKind left, Azure.AI.ContentUnderstanding.DocumentAnnotationKind right) { throw null; } + public override string ToString() { throw null; } + } + public partial class DocumentBarcode : System.ClientModel.Primitives.IJsonModel<Azure.AI.ContentUnderstanding.DocumentBarcode>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.DocumentBarcode> + { + internal DocumentBarcode() { } + public float? Confidence { get { throw null; } } + public Azure.AI.ContentUnderstanding.DocumentBarcodeKind Kind { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public string Value { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentBarcode JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentBarcode PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentBarcode System.ClientModel.Primitives.IJsonModel<Azure.AI.ContentUnderstanding.DocumentBarcode>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel<Azure.AI.ContentUnderstanding.DocumentBarcode>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentBarcode System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.DocumentBarcode>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.DocumentBarcode>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.DocumentBarcode>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct DocumentBarcodeKind : System.IEquatable<Azure.AI.ContentUnderstanding.DocumentBarcodeKind> + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public DocumentBarcodeKind(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind Aztec { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind Codabar { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind Code128 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind Code39 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind Code93 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind DataBar { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind DataBarExpanded { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind DataMatrix { get { throw null; } } + public static 
Azure.AI.ContentUnderstanding.DocumentBarcodeKind EAN13 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind EAN8 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind ITF { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind MaxiCode { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind MicroQRCode { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind PDF417 { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind QRCode { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind UPCA { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentBarcodeKind UPCE { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.DocumentBarcodeKind other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.DocumentBarcodeKind left, Azure.AI.ContentUnderstanding.DocumentBarcodeKind right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentBarcodeKind (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentBarcodeKind? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.DocumentBarcodeKind left, Azure.AI.ContentUnderstanding.DocumentBarcodeKind right) { throw null; } + public override string ToString() { throw null; } + } + public partial class DocumentCaption : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentCaption() { } + public string Content { get { throw null; } } + public System.Collections.Generic.IList Elements { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentCaption JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentCaption PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentCaption System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentCaption System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, 
System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentChartFigure : Azure.AI.ContentUnderstanding.DocumentFigure, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentChartFigure() { } + public System.Collections.Generic.IDictionary Content { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.DocumentFigure JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.DocumentFigure PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentChartFigure System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentChartFigure System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentContent : Azure.AI.ContentUnderstanding.MediaContent, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentContent() { } + public System.Collections.Generic.IList Annotations { get { throw null; } } + public int EndPageNumber { get { throw null; } } + public System.Collections.Generic.IList Figures { get { throw null; } } + public System.Collections.Generic.IList Hyperlinks { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentField this[string fieldName] { get { throw null; } } + public System.Collections.Generic.IList Pages { get { throw null; } } + public System.Collections.Generic.IList Paragraphs { get { throw null; } } + public System.Collections.Generic.IList Sections { get { throw null; } } + public System.Collections.Generic.IList Segments { get { throw null; } } + public int StartPageNumber { get { throw null; } } + public System.Collections.Generic.IList Tables { get { throw null; } } + public Azure.AI.ContentUnderstanding.LengthUnit? 
Unit { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.MediaContent JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.MediaContent PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentContentSegment : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentContentSegment() { } + public string Category { get { throw null; } } + public int EndPageNumber { get { throw null; } } + public string SegmentId { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public int StartPageNumber { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentContentSegment JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentContentSegment PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentContentSegment System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentContentSegment System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions 
options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public abstract partial class DocumentFigure : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentFigure() { } + public Azure.AI.ContentUnderstanding.DocumentCaption Caption { get { throw null; } } + public string Description { get { throw null; } } + public System.Collections.Generic.IList Elements { get { throw null; } } + public System.Collections.Generic.IList Footnotes { get { throw null; } } + public string Id { get { throw null; } } + public Azure.AI.ContentUnderstanding.SemanticRole? Role { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentFigure JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentFigure PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentFigure System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentFigure System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentFootnote : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentFootnote() { } + public string Content { get { throw null; } } + public System.Collections.Generic.IList Elements { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentFootnote JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentFootnote PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData 
PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentFootnote System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentFootnote System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentFormula : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentFormula() { } + public float? Confidence { get { throw null; } } + public Azure.AI.ContentUnderstanding.DocumentFormulaKind Kind { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public string Value { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentFormula JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentFormula PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentFormula System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentFormula System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct DocumentFormulaKind : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public DocumentFormulaKind(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentFormulaKind Display { get { throw null; } } + public static 
Azure.AI.ContentUnderstanding.DocumentFormulaKind Inline { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.DocumentFormulaKind other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.DocumentFormulaKind left, Azure.AI.ContentUnderstanding.DocumentFormulaKind right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentFormulaKind (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentFormulaKind? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.DocumentFormulaKind left, Azure.AI.ContentUnderstanding.DocumentFormulaKind right) { throw null; } + public override string ToString() { throw null; } + } + public partial class DocumentHyperlink : System.ClientModel.Primitives.IJsonModel<Azure.AI.ContentUnderstanding.DocumentHyperlink>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.DocumentHyperlink> + { + internal DocumentHyperlink() { } + public string Content { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public string Url { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentHyperlink JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentHyperlink PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentHyperlink System.ClientModel.Primitives.IJsonModel<Azure.AI.ContentUnderstanding.DocumentHyperlink>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel<Azure.AI.ContentUnderstanding.DocumentHyperlink>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentHyperlink System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.DocumentHyperlink>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.DocumentHyperlink>.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.DocumentHyperlink>.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentLine : System.ClientModel.Primitives.IJsonModel<Azure.AI.ContentUnderstanding.DocumentLine>, System.ClientModel.Primitives.IPersistableModel<Azure.AI.ContentUnderstanding.DocumentLine> + { + internal DocumentLine() { } + public string Content { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentLine JsonModelCreateCore(ref 
System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentLine PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentLine System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentLine System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentMermaidFigure : Azure.AI.ContentUnderstanding.DocumentFigure, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentMermaidFigure() { } + public string Content { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.DocumentFigure JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.DocumentFigure PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentMermaidFigure System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentMermaidFigure System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentPage : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal 
DocumentPage() { } + public float? Angle { get { throw null; } } + public System.Collections.Generic.IList Barcodes { get { throw null; } } + public System.Collections.Generic.IList Formulas { get { throw null; } } + public float? Height { get { throw null; } } + public System.Collections.Generic.IList Lines { get { throw null; } } + public int PageNumber { get { throw null; } } + public System.Collections.Generic.IList Spans { get { throw null; } } + public float? Width { get { throw null; } } + public System.Collections.Generic.IList Words { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentPage JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentPage PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentPage System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentPage System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentParagraph : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentParagraph() { } + public string Content { get { throw null; } } + public Azure.AI.ContentUnderstanding.SemanticRole? 
Role { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentParagraph JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentParagraph PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentParagraph System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentParagraph System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentSection : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentSection() { } + public System.Collections.Generic.IList Elements { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentSection JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentSection PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentSection System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentSection System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + 
System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentTable : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentTable() { } + public Azure.AI.ContentUnderstanding.DocumentCaption Caption { get { throw null; } } + public System.Collections.Generic.IList Cells { get { throw null; } } + public int ColumnCount { get { throw null; } } + public System.Collections.Generic.IList Footnotes { get { throw null; } } + public Azure.AI.ContentUnderstanding.SemanticRole? Role { get { throw null; } } + public int RowCount { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentTable JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentTable PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentTable System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentTable System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentTableCell : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentTableCell() { } + public int ColumnIndex { get { throw null; } } + public int? ColumnSpan { get { throw null; } } + public string Content { get { throw null; } } + public System.Collections.Generic.IList Elements { get { throw null; } } + public Azure.AI.ContentUnderstanding.DocumentTableCellKind? Kind { get { throw null; } } + public int RowIndex { get { throw null; } } + public int? 
RowSpan { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentTableCell JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentTableCell PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentTableCell System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentTableCell System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct DocumentTableCellKind : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public DocumentTableCellKind(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.DocumentTableCellKind ColumnHeader { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentTableCellKind Content { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentTableCellKind Description { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentTableCellKind RowHeader { get { throw null; } } + public static Azure.AI.ContentUnderstanding.DocumentTableCellKind StubHead { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.DocumentTableCellKind other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.DocumentTableCellKind left, Azure.AI.ContentUnderstanding.DocumentTableCellKind right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentTableCellKind (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.DocumentTableCellKind? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.DocumentTableCellKind left, Azure.AI.ContentUnderstanding.DocumentTableCellKind right) { throw null; } + public override string ToString() { throw null; } + } + public partial class DocumentWord : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentWord() { } + public float? Confidence { get { throw null; } } + public string Content { get { throw null; } } + public string Source { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.DocumentWord JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.DocumentWord PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.DocumentWord System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.DocumentWord System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct GenerationMethod : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public GenerationMethod(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.GenerationMethod Classify { get { throw null; } } + public static Azure.AI.ContentUnderstanding.GenerationMethod Extract { get { throw null; } } + public static Azure.AI.ContentUnderstanding.GenerationMethod Generate { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.GenerationMethod other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.GenerationMethod left, Azure.AI.ContentUnderstanding.GenerationMethod right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.GenerationMethod (string value) { throw null; } + public static implicit operator 
Azure.AI.ContentUnderstanding.GenerationMethod? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.GenerationMethod left, Azure.AI.ContentUnderstanding.GenerationMethod right) { throw null; } + public override string ToString() { throw null; } + } + public partial class IntegerField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal IntegerField() { } + public long? ValueInteger { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.IntegerField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.IntegerField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class JsonField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal JsonField() { } + public System.BinaryData ValueJson { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.JsonField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.JsonField 
System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public abstract partial class KnowledgeSource : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal KnowledgeSource() { } + protected virtual Azure.AI.ContentUnderstanding.KnowledgeSource JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.KnowledgeSource PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.KnowledgeSource System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.KnowledgeSource System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class LabeledDataKnowledgeSource : Azure.AI.ContentUnderstanding.KnowledgeSource, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public LabeledDataKnowledgeSource(System.Uri containerUrl, string fileListPath) { } + public System.Uri ContainerUrl { get { throw null; } set { } } + public string FileListPath { get { throw null; } set { } } + public string Prefix { get { throw null; } set { } } + protected override Azure.AI.ContentUnderstanding.KnowledgeSource JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.KnowledgeSource PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.LabeledDataKnowledgeSource System.ClientModel.Primitives.IJsonModel.Create(ref 
System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.LabeledDataKnowledgeSource System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct LengthUnit : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public LengthUnit(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.LengthUnit Inch { get { throw null; } } + public static Azure.AI.ContentUnderstanding.LengthUnit Pixel { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.LengthUnit other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.LengthUnit left, Azure.AI.ContentUnderstanding.LengthUnit right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.LengthUnit (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.LengthUnit? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.LengthUnit left, Azure.AI.ContentUnderstanding.LengthUnit right) { throw null; } + public override string ToString() { throw null; } + } + public abstract partial class MediaContent : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal MediaContent() { } + public string AnalyzerId { get { throw null; } } + public string Category { get { throw null; } } + public System.Collections.Generic.IDictionary Fields { get { throw null; } } + public string Markdown { get { throw null; } } + public string MimeType { get { throw null; } } + public string Path { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.MediaContent JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.MediaContent PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.MediaContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.MediaContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class NumberField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal NumberField() { } + public double? 
ValueNumber { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.NumberField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.NumberField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class ObjectField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal ObjectField() { } + public Azure.AI.ContentUnderstanding.ContentField this[string fieldName] { get { throw null; } } + public System.Collections.Generic.IDictionary ValueObject { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.ObjectField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.ObjectField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData 
System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct ProcessingLocation : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public ProcessingLocation(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.ProcessingLocation DataZone { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ProcessingLocation Geography { get { throw null; } } + public static Azure.AI.ContentUnderstanding.ProcessingLocation Global { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.ProcessingLocation other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.ProcessingLocation left, Azure.AI.ContentUnderstanding.ProcessingLocation right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ProcessingLocation (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.ProcessingLocation? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.ProcessingLocation left, Azure.AI.ContentUnderstanding.ProcessingLocation right) { throw null; } + public override string ToString() { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct SemanticRole : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public SemanticRole(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.SemanticRole Footnote { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole FormulaBlock { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole PageFooter { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole PageHeader { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole PageNumber { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole SectionHeading { get { throw null; } } + public static Azure.AI.ContentUnderstanding.SemanticRole Title { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.SemanticRole other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.SemanticRole left, Azure.AI.ContentUnderstanding.SemanticRole right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.SemanticRole (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.SemanticRole? 
(string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.SemanticRole left, Azure.AI.ContentUnderstanding.SemanticRole right) { throw null; } + public override string ToString() { throw null; } + } + public partial class StringField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal StringField() { } + public string ValueString { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.StringField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.StringField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class SupportedModels : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal SupportedModels() { } + public System.Collections.Generic.IList Completion { get { throw null; } } + public System.Collections.Generic.IList Embedding { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.SupportedModels JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.SupportedModels PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.SupportedModels System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.SupportedModels 
System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct TableFormat : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public TableFormat(string value) { throw null; } + public static Azure.AI.ContentUnderstanding.TableFormat Html { get { throw null; } } + public static Azure.AI.ContentUnderstanding.TableFormat Markdown { get { throw null; } } + public bool Equals(Azure.AI.ContentUnderstanding.TableFormat other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.AI.ContentUnderstanding.TableFormat left, Azure.AI.ContentUnderstanding.TableFormat right) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.TableFormat (string value) { throw null; } + public static implicit operator Azure.AI.ContentUnderstanding.TableFormat? (string value) { throw null; } + public static bool operator !=(Azure.AI.ContentUnderstanding.TableFormat left, Azure.AI.ContentUnderstanding.TableFormat right) { throw null; } + public override string ToString() { throw null; } + } + public partial class TimeField : Azure.AI.ContentUnderstanding.ContentField, System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal TimeField() { } + public System.TimeSpan? 
ValueTime { get { throw null; } } + protected override Azure.AI.ContentUnderstanding.ContentField JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected override Azure.AI.ContentUnderstanding.ContentField PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected override System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.TimeField System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.TimeField System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class TranscriptPhrase : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal TranscriptPhrase() { } + public float? 
Confidence { get { throw null; } } + public long EndTimeMs { get { throw null; } } + public string Locale { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public string Speaker { get { throw null; } } + public long StartTimeMs { get { throw null; } } + public string Text { get { throw null; } } + public System.Collections.Generic.IList Words { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.TranscriptPhrase JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.TranscriptPhrase PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.TranscriptPhrase System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.TranscriptPhrase System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class TranscriptWord : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal TranscriptWord() { } + public long EndTimeMs { get { throw null; } } + public Azure.AI.ContentUnderstanding.ContentSpan Span { get { throw null; } } + public long StartTimeMs { get { throw null; } } + public string Text { get { throw null; } } + protected virtual Azure.AI.ContentUnderstanding.TranscriptWord JsonModelCreateCore(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + protected virtual Azure.AI.ContentUnderstanding.TranscriptWord PersistableModelCreateCore(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + protected virtual System.BinaryData PersistableModelWriteCore(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + Azure.AI.ContentUnderstanding.TranscriptWord System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentUnderstanding.TranscriptWord 
System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } +} +namespace Microsoft.Extensions.Azure +{ + public static partial class ContentUnderstandingClientBuilderExtensions + { + public static Azure.Core.Extensions.IAzureClientBuilder AddContentUnderstandingClient(this TBuilder builder, System.Uri endpoint) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithCredential { throw null; } + public static Azure.Core.Extensions.IAzureClientBuilder AddContentUnderstandingClient(this TBuilder builder, System.Uri endpoint, Azure.AzureKeyCredential credential) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilder { throw null; } + public static Azure.Core.Extensions.IAzureClientBuilder AddContentUnderstandingClient(this TBuilder builder, TConfiguration configuration) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithConfiguration { throw null; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/assets.json b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/assets.json new file mode 100644 index 000000000000..a9210228574c --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/assets.json @@ -0,0 +1,7 @@ +{ + "AssetsRepo": "Azure/azure-sdk-assets", + "AssetsRepoPrefixPath": "net", + "TagPrefix": "net/contentunderstanding/Azure.AI.ContentUnderstanding", + "Tag": "net/contentunderstanding/Azure.AI.ContentUnderstanding_13b81776c9" +} + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/cspell.yaml b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/cspell.yaml new file mode 100644 index 000000000000..5ee21cf45f5d --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/cspell.yaml @@ -0,0 +1,10 @@ +import: + - ../../../.vscode/cspell.json +overrides: + - filename: '**/sdk/contentunderstanding/**/*.cs' + words: + - upca + - upce + - UPCA + - UPCE + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md new file mode 100644 index 000000000000..39221bdb09ac --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md @@ -0,0 +1,111 @@ +# Configure model deployment defaults + +This sample demonstrates how to configure and retrieve default model deployment settings for your Microsoft Foundry resource. This is a **required one-time setup** before using prebuilt analyzers. + +## About model deployment configuration + +Content Understanding prebuilt analyzers require specific large language model deployments to function. 
Currently, Content Understanding uses OpenAI GPT models: + +- **gpt-4.1** - Used by most prebuilt analyzers (e.g., `prebuilt-invoice`, `prebuilt-receipt`, `prebuilt-idDocument`) +- **gpt-4.1-mini** - Used by RAG analyzers (e.g., `prebuilt-documentSearch`, `prebuilt-imageSearch`, `prebuilt-audioSearch`, `prebuilt-videoSearch`) +- **text-embedding-3-large** - Used for semantic search and embeddings + +This configuration is **per Microsoft Foundry resource** and persists across sessions. You only need to configure it once per Microsoft Foundry resource (or when you change deployment names). + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource**. See [README][README] for prerequisites and instructions. + +You also need to have deployed the following models in Microsoft Foundry: +- gpt-4.1 +- gpt-4.1-mini +- text-embedding-3-large + +## Creating a `ContentUnderstandingClient` + +The `ContentUnderstandingClient` is the main interface for interacting with the Content Understanding service. In this sample, you'll use the client to: +- Retrieve current model deployment defaults (`GetDefaultsAsync`) +- Update model deployment mappings (`UpdateDefaultsAsync`) + +To create a new `ContentUnderstandingClient` you need the endpoint and credentials from your Microsoft Foundry resource. You can authenticate using either `DefaultAzureCredential` (recommended) or an API key. + +### Using DefaultAzureCredential (recommended) + +```C# Snippet:CreateContentUnderstandingClient +string endpoint = ""; +var credential = new DefaultAzureCredential(); +var client = new ContentUnderstandingClient(new Uri(endpoint), credential); +``` + +### Using API key + +```C# Snippet:CreateContentUnderstandingClientApiKey +string endpoint = ""; +string apiKey = ""; +var client = new ContentUnderstandingClient(new Uri(endpoint), new AzureKeyCredential(apiKey)); +``` + +> **āš ļø Security Warning**: API key authentication is not secure and is only recommended for testing purposes with test resources. For production, use `DefaultAzureCredential` or other secure authentication methods. + +## Configure model deployments + +Before you can use prebuilt analyzers, you need to map your deployed large language models to the models required by the prebuilt analyzers. 
Currently, Content Understanding uses OpenAI GPT models: + +```C# Snippet:ContentUnderstandingUpdateDefaults +// Map your deployed models to the models required by prebuilt analyzers +var modelDeployments = new Dictionary +{ + ["gpt-4.1"] = "", + ["gpt-4.1-mini"] = "", + ["text-embedding-3-large"] = "" +}; + +var response = await client.UpdateDefaultsAsync(modelDeployments); +ContentUnderstandingDefaults updatedDefaults = response.Value; + +Console.WriteLine("Model deployments configured successfully!"); +foreach (var kvp in updatedDefaults.ModelDeployments) +{ + Console.WriteLine($" {kvp.Key} → {kvp.Value}"); +} +``` + +## Retrieve current defaults + +You can retrieve the current default model deployment configuration: + +```C# Snippet:ContentUnderstandingGetDefaults +var getResponse = await client.GetDefaultsAsync(); +ContentUnderstandingDefaults defaults = getResponse.Value; + +Console.WriteLine("Current model deployment mappings:"); +if (defaults.ModelDeployments != null && defaults.ModelDeployments.Count > 0) +{ + foreach (var kvp in defaults.ModelDeployments) + { + Console.WriteLine($" {kvp.Key} → {kvp.Value}"); + } +} +else +{ + Console.WriteLine(" No model deployments configured yet."); +} +``` + +## Next steps + +After configuring model deployments, you can use prebuilt analyzers. See: +- [Sample 01: Analyze a document from binary data][sample01] to analyze PDF files +- [Sample 02: Analyze a document from URL][sample02] to analyze documents from URLs + +## Learn more + +- [Content Understanding Documentation][cu-docs] +- [Model Deployment Configuration][model-deployment-docs] + +[README]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/README.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample02]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample02_AnalyzeUrl.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[model-deployment-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/quickstart/use-rest-api?tabs=portal%2Cdocument + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md new file mode 100644 index 000000000000..dcf57af8ffb0 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md @@ -0,0 +1,187 @@ +# Analyze a document from binary data + +This sample demonstrates how to analyze a PDF file from disk using the `prebuilt-documentSearch` analyzer. + +## About Content Understanding + +Content Understanding supports multiple content types: + +- **Documents** - Extract text, tables, figures, layout information, and structured markdown from PDFs, images, and Office documents +- **Images** - Analyze standalone images to generate descriptions, extract visual features, and identify objects and scenes within images +- **Audio** - Transcribe audio content with speaker diarization, timing information, and conversation summaries +- **Video** - Analyze video content with visual frame extraction, audio transcription, and structured summaries + +This sample focuses on **document analysis**. For image, audio, and video analysis examples, see other samples in the [samples directory][samples-directory]. 
+ +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource**. See [README][README] for prerequisites and instructions. + +### āš ļø IMPORTANT: Configure model deployments first + +> **Before using prebuilt analyzers, you MUST configure model deployments for your Microsoft Foundry resource.** This is a **one-time setup per resource** that maps your deployed large language models to the models required by the prebuilt analyzers. Currently, Content Understanding uses OpenAI GPT models. This configuration is persisted in your Microsoft Foundry resource, so you only need to run this once per resource (or whenever you change your deployment names). + +The `prebuilt-documentSearch` analyzer requires **gpt-4.1-mini** and **text-embedding-3-large** model deployments. See the [README][README] for detailed instructions on configuring model deployments. + +## Prebuilt analyzers + +Content Understanding provides prebuilt analyzers that are ready to use without any configuration. These analyzers use the `*Search` naming pattern: + +- **`prebuilt-documentSearch`** - Extracts content from documents (PDF, images, Office documents) with layout preservation, table detection, figure analysis, and structured markdown output. Optimized for RAG scenarios. +- **`prebuilt-imageSearch`** - Analyzes standalone images to generate descriptions, extract visual features, and identify objects and scenes within images. Optimized for image understanding and search scenarios. Note: Image analysis is not optimized for text extraction; use `prebuilt-documentSearch` for documents containing text. +- **`prebuilt-audioSearch`** - Transcribes audio content with speaker diarization, timing information, and conversation summaries. Supports multilingual transcription. +- **`prebuilt-videoSearch`** - Analyzes video content with visual frame extraction, audio transcription, and structured summaries. Provides temporal alignment of visual and audio content. + +This sample uses **`prebuilt-documentSearch`** to extract structured content from PDF documents. + +## Creating a `ContentUnderstandingClient` + +To create a new `ContentUnderstandingClient` you need the endpoint and credentials from your resource. You can authenticate using either `DefaultAzureCredential` (recommended) or an API key. + +You can set `endpoint` based on an environment variable, a configuration setting, or any way that works for your application. + +### Using DefaultAzureCredential (recommended) + +The simplest way to authenticate is using `DefaultAzureCredential`, which supports multiple authentication methods and works well in both local development and production environments: + +```C# Snippet:CreateContentUnderstandingClient +string endpoint = ""; +var credential = new DefaultAzureCredential(); +var client = new ContentUnderstandingClient(new Uri(endpoint), credential); +``` + +### Using API key + +> **āš ļø Security Warning:** API key authentication is **not secure** for production use. API keys are sensitive credentials that should not be hardcoded or committed to source control. This method is **only recommended for testing purposes with test resources**. For production applications, use `DefaultAzureCredential` or other Microsoft Entra ID-based authentication methods. 
+ +You can authenticate using an API key from your Microsoft Foundry resource: + +```C# Snippet:CreateContentUnderstandingClientApiKey +string endpoint = ""; +string apiKey = ""; +var client = new ContentUnderstandingClient(new Uri(endpoint), new AzureKeyCredential(apiKey)); +``` + +## Analyze a document from binary data + +The `prebuilt-documentSearch` analyzer transforms unstructured documents into structured, machine-readable data optimized for RAG scenarios. + +To analyze a document from binary data, use the `AnalyzeBinaryAsync` method. The returned value is an `AnalyzeResult` object containing data about the submitted document. Since we're analyzing a document, we'll pass the analyzer ID `prebuilt-documentSearch` to the method. + +> **Note:** Content Understanding operations are asynchronous long-running operations. The SDK handles polling automatically when using `WaitUntil.Completed`. + +```C# Snippet:ContentUnderstandingAnalyzeBinaryAsync +string filePath = ""; +byte[] fileBytes = File.ReadAllBytes(filePath); +BinaryData binaryData = BinaryData.FromBytes(fileBytes); + +AnalyzeResultOperation operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + binaryData); + +AnalyzeResult result = operation.Value; +``` + +## Extract markdown content + +The most common use case for document analysis is extracting markdown content, which is optimized for RAG (Retrieval-Augmented Generation) scenarios. Markdown provides a structured, searchable representation of the document that preserves layout, formatting, and hierarchy while being easily consumable by AI models and search systems. + +The `prebuilt-documentSearch` analyzer extracts: + +1. **Content Analysis** - Text (printed and handwritten), selection marks, barcodes, mathematical formulas, hyperlinks, and annotations +2. **Figure Analysis** - Descriptions for images/charts/diagrams, converts charts to Chart.js syntax, and diagrams to Mermaid.js syntax +3. **Structure Analysis** - Paragraphs with contextual roles, tables with complex layouts, and hierarchical sections +4. **GitHub Flavored Markdown** - Richly formatted markdown that preserves document structure + +```C# Snippet:ContentUnderstandingExtractMarkdown +// A PDF file has only one content element even if it contains multiple pages +MediaContent? content = null; +if (result.Contents == null || result.Contents.Count == 0) +{ + Console.WriteLine("(No content returned from analysis)"); +} +else +{ + content = result.Contents.First(); + if (!string.IsNullOrEmpty(content.Markdown)) + { + Console.WriteLine(content.Markdown); + } + else + { + Console.WriteLine("(No markdown content available)"); + } +} +``` + +The markdown output includes structured text with preserved formatting and hierarchy, table representations in markdown format, figure descriptions for images/charts/diagrams, and layout preservation maintaining document structure. + +For more information about the markdown format, see [Document Markdown][cu-document-markdown]. + +## Access document properties with type-safe APIs + +The SDK provides type-safe access to extraction results. Since we're analyzing a PDF document, the content is a `DocumentContent` type, which provides strongly-typed access to document-specific properties. The extraction results are very rich and include many more properties than shown here. The following examples demonstrate just a few ways to access document properties, page information, and structural information like tables. 
For detailed information about all available document elements and properties, see [Document Elements][cu-document-elements]. + +```C# Snippet:ContentUnderstandingAccessDocumentProperties +// Check if this is document content to access document-specific properties +if (content is DocumentContent documentContent) +{ + Console.WriteLine($"Document type: {documentContent.MimeType ?? "(unknown)"}"); + Console.WriteLine($"Start page: {documentContent.StartPageNumber}"); + Console.WriteLine($"End page: {documentContent.EndPageNumber}"); + Console.WriteLine($"Total pages: {documentContent.EndPageNumber - documentContent.StartPageNumber + 1}"); + + // Check for pages + if (documentContent.Pages != null && documentContent.Pages.Count > 0) + { + Console.WriteLine($"Number of pages: {documentContent.Pages.Count}"); + foreach (var page in documentContent.Pages) + { + var unit = documentContent.Unit?.ToString() ?? "units"; + Console.WriteLine($" Page {page.PageNumber}: {page.Width} x {page.Height} {unit}"); + } + } + + // Check for tables + if (documentContent.Tables != null && documentContent.Tables.Count > 0) + { + Console.WriteLine($"Number of tables: {documentContent.Tables.Count}"); + int tableCounter = 1; + foreach (var table in documentContent.Tables) + { + Console.WriteLine($" Table {tableCounter}: {table.RowCount} rows x {table.ColumnCount} columns"); + tableCounter++; + } + } +} +``` + +## Next steps + +- **[Sample02_AnalyzeUrl][sample02-analyze-url]** - Learn how to analyze documents from publicly accessible URLs +- Explore other samples in the [samples directory][samples-directory] for more advanced scenarios + +## Learn more + +- **[Content Understanding Overview][cu-overview]** - Comprehensive introduction to the service +- **[What's New][cu-whats-new]** - Latest features and updates +- **[Document Overview][cu-document-overview]** - Document analysis capabilities and use cases +- **[Document Markdown][cu-document-markdown]** - Markdown format and structure for document content +- **[Document Elements][cu-document-elements]** - Detailed documentation on document extraction +- **[Audio Overview][cu-audio-overview]** - Audio capabilities and markdown format +- **[Video Overview][cu-video-overview]** - Video capabilities and elements +- **[Image Overview][cu-image-overview]** - Image analysis capabilities + +[README]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding#getting-started +[samples-directory]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples +[sample02-analyze-url]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample02_AnalyzeUrl.md +[cu-overview]: https://learn.microsoft.com/azure/ai-services/content-understanding/overview +[cu-whats-new]: https://learn.microsoft.com/azure/ai-services/content-understanding/whats-new +[cu-document-overview]: https://learn.microsoft.com/azure/ai-services/content-understanding/document/overview +[cu-document-markdown]: https://learn.microsoft.com/azure/ai-services/content-understanding/document/markdown +[cu-document-elements]: https://learn.microsoft.com/azure/ai-services/content-understanding/document/elements +[cu-audio-overview]: https://learn.microsoft.com/azure/ai-services/content-understanding/audio/overview +[cu-video-overview]: https://learn.microsoft.com/azure/ai-services/content-understanding/video/overview +[cu-image-overview]: 
https://learn.microsoft.com/azure/ai-services/content-understanding/image/overview diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample02_AnalyzeUrl.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample02_AnalyzeUrl.md new file mode 100644 index 000000000000..a91cd4d7019b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample02_AnalyzeUrl.md @@ -0,0 +1,54 @@ +# Analyze a document from a URL + +This sample demonstrates how to analyze a document from a URL using the `prebuilt-documentSearch` analyzer. + +> **Before you begin**: This sample builds on concepts introduced in [Sample01_AnalyzeBinary][sample01-analyze-binary]. If you're new to Content Understanding, start with that sample to learn about: +> - Prerequisites and model deployment configuration +> - Creating a `ContentUnderstandingClient` with authentication +> - Extracting markdown content from analysis results +> - Accessing document properties with type-safe APIs + +## What's different from Sample01 + +This sample shows how to analyze a document from a **publicly accessible URL** instead of a local file. The main difference is using `AnalyzeAsync` with `AnalyzeInput` instead of `AnalyzeBinaryAsync`. + +## Analyze a document from a URL + +To analyze a document from a URL, use the `AnalyzeAsync` method with an `AnalyzeInput` that specifies the document URL. The returned value is an `AnalyzeResult` object containing data about the submitted document. + +> **Note:** Content Understanding operations are asynchronous long-running operations. The SDK handles polling automatically when using `WaitUntil.Completed`. + +```C# Snippet:ContentUnderstandingAnalyzeUrlAsync +Uri uriSource = new Uri(""); +Operation<AnalyzeResult> operation = await client.AnalyzeAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + inputs: new[] { new AnalyzeInput { Url = uriSource } }); + +AnalyzeResult result = operation.Value; +``` + +After getting the result, you can extract markdown content and access document properties using the same code patterns shown in [Sample01_AnalyzeBinary][sample01-analyze-binary]. The result structure is identical regardless of whether you analyze from binary data or a URL. + +The generated sample includes code for extracting markdown and accessing document properties (using the same snippets as Sample01), but this markdown focuses on the URL-specific analysis method.
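The following minimal sketch shows that reuse in practice. It is illustrative rather than a tracked `Snippet:` block, and it assumes the same `MediaContent`/`Markdown` shape shown in Sample01:

```C#
// Illustrative only: print the markdown extracted from the URL-based analysis,
// using the same access pattern as Sample01_AnalyzeBinary.
if (result.Contents != null && result.Contents.Count > 0)
{
    MediaContent content = result.Contents.First();
    Console.WriteLine(string.IsNullOrEmpty(content.Markdown)
        ? "(No markdown content available)"
        : content.Markdown);
}
else
{
    Console.WriteLine("(No content returned from analysis)");
}
```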
+ +## Next steps + +- Try analyzing different document types (images, Office documents) from URLs +- Explore other samples in the [samples directory][samples-directory] for more advanced scenarios +- Learn about creating custom analyzers and classifiers + +## Learn more + +- **[Sample01_AnalyzeBinary][sample01-analyze-binary]** - Learn the basics of document analysis, authentication, and result processing +- **[Content Understanding Overview][cu-overview]** - Comprehensive introduction to the service +- **[Document Overview][cu-document-overview]** - Document analysis capabilities and use cases +- **[Document Markdown][cu-document-markdown]** - Markdown format and structure for document content +- **[Document Elements][cu-document-elements]** - Detailed documentation on document extraction + +[sample01-analyze-binary]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[samples-directory]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples +[cu-overview]: https://learn.microsoft.com/azure/ai-services/content-understanding/overview +[cu-document-overview]: https://learn.microsoft.com/azure/ai-services/content-understanding/document/overview +[cu-document-markdown]: https://learn.microsoft.com/azure/ai-services/content-understanding/document/markdown +[cu-document-elements]: https://learn.microsoft.com/azure/ai-services/content-understanding/document/elements diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample03_AnalyzeInvoice.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample03_AnalyzeInvoice.md new file mode 100644 index 000000000000..650dc24b3508 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample03_AnalyzeInvoice.md @@ -0,0 +1,196 @@ +# Analyze an invoice using prebuilt analyzer + +This sample demonstrates how to analyze an invoice from a URL using the `prebuilt-invoice` analyzer and extract structured fields from the result. + +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 00: Configure model deployment defaults][sample00] - Required setup before using prebuilt analyzers +- [Sample 02: Analyze a document from URL][sample02] - Basic URL-based analysis + +## About prebuilt analyzers + +Content Understanding provides **70+ production-ready prebuilt analyzers** that are ready to use without any training or configuration. These analyzers are powered by rich knowledge bases of thousands of real-world document examples, enabling them to understand document structure and adapt to variations in format and content. + +Prebuilt analyzers are ideal for: +- **Content ingestion** in search and retrieval-augmented generation (RAG) workflows +- **Intelligent document processing (IDP)** to extract structured data from common document types +- **Agentic flows** as tools for extracting structured representations from input files + +### The `prebuilt-invoice` analyzer + +The `prebuilt-invoice` analyzer is a domain-specific analyzer optimized for processing invoices, utility bills, sales orders, and purchase orders. 
It automatically extracts structured fields including: + +- **Customer/Vendor information**: Name, address, contact details +- **Invoice metadata**: Invoice number, date, due date, purchase order number +- **Line items**: Description, quantity, unit price, total for each item +- **Financial totals**: Subtotal, tax amount, shipping charges, total amount +- **Payment information**: Payment terms, payment method, remittance address + +The analyzer works out of the box with various invoice formats and requires no configuration. It's part of the **financial documents** category of prebuilt analyzers, which also includes: +- `prebuilt-receipt` - Sales receipts from retail and dining establishments +- `prebuilt-creditCard` - Credit card statements +- `prebuilt-bankStatement.us` - US bank statements +- `prebuilt-check.us` - US bank checks +- `prebuilt-creditMemo` - Credit memos and refund documents + +For a complete list of available prebuilt analyzers, see the [Prebuilt Analyzers documentation][prebuilt-analyzers-docs]. + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. + +## Analyze invoice from URL + +Analyze an invoice from a URL using the `prebuilt-invoice` analyzer: + +```C# Snippet:ContentUnderstandingAnalyzeInvoice +Uri invoiceUrl = new Uri(""); +Operation operation = await client.AnalyzeAsync( + WaitUntil.Completed, + "prebuilt-invoice", + inputs: new[] { new AnalyzeInput { Url = invoiceUrl } }); + +AnalyzeResult result = operation.Value; +``` + +## Extract invoice fields + +The `prebuilt-invoice` analyzer returns structured fields that you can access type-safely. Each field includes metadata such as confidence scores and source information: + +```C# Snippet:ContentUnderstandingExtractInvoiceFields +// Get the document content (invoices are documents) +if (result.Contents?.FirstOrDefault() is DocumentContent documentContent) +{ + // Print document unit information + // The unit indicates the measurement system used for coordinates in the source field + Console.WriteLine($"Document unit: {documentContent.Unit ?? "unknown"}"); + Console.WriteLine($"Pages: {documentContent.StartPageNumber} to {documentContent.EndPageNumber}"); + Console.WriteLine(); + + // Extract simple string fields + var customerNameField = documentContent["CustomerName"]; + var invoiceDateField = documentContent["InvoiceDate"]; + + var customerName = customerNameField?.Value?.ToString(); + var invoiceDate = invoiceDateField?.Value?.ToString(); + + Console.WriteLine($"Customer Name: {customerName ?? "(None)"}"); + if (customerNameField != null) + { + Console.WriteLine($" Confidence: {customerNameField.Confidence?.ToString("F2") ?? "N/A"}"); + // Source is an encoded identifier containing bounding box coordinates + // Format: D(pageNumber, x1, y1, x2, y2, x3, y3, x4, y4) + // Coordinates are in the document's unit (e.g., inches for US documents) + Console.WriteLine($" Source: {customerNameField.Source ?? "N/A"}"); + if (customerNameField.Spans != null && customerNameField.Spans.Count > 0) + { + var span = customerNameField.Spans[0]; + Console.WriteLine($" Position in markdown: offset={span.Offset}, length={span.Length}"); + } + } + + Console.WriteLine($"Invoice Date: {invoiceDate ?? 
"(None)"}"); + if (invoiceDateField != null) + { + Console.WriteLine($" Confidence: {invoiceDateField.Confidence?.ToString("F2") ?? "N/A"}"); + Console.WriteLine($" Source: {invoiceDateField.Source ?? "N/A"}"); + if (invoiceDateField.Spans != null && invoiceDateField.Spans.Count > 0) + { + var span = invoiceDateField.Spans[0]; + Console.WriteLine($" Position in markdown: offset={span.Offset}, length={span.Length}"); + } + } + + // Extract object fields (nested structures) + if (documentContent["TotalAmount"] is ObjectField totalAmountObj) + { + var amount = totalAmountObj["Amount"]?.Value as double?; + var currency = totalAmountObj["CurrencyCode"]?.Value?.ToString(); + Console.WriteLine($"Total: {currency ?? "$"}{amount?.ToString("F2") ?? "(None)"}"); + if (totalAmountObj.Confidence.HasValue) + { + Console.WriteLine($" Confidence: {totalAmountObj.Confidence.Value:F2}"); + } + if (!string.IsNullOrEmpty(totalAmountObj.Source)) + { + Console.WriteLine($" Source: {totalAmountObj.Source}"); + } + } + + // Extract array fields (collections like line items) + if (documentContent["LineItems"] is ArrayField lineItems) + { + Console.WriteLine($"Line Items ({lineItems.Count}):"); + for (int i = 0; i < lineItems.Count; i++) + { + if (lineItems[i] is ObjectField item) + { + var description = item["Description"]?.Value?.ToString(); + var quantity = item["Quantity"]?.Value as double?; + Console.WriteLine($" Item {i + 1}: {description ?? "N/A"} (Qty: {quantity?.ToString() ?? "N/A"})"); + if (item.Confidence.HasValue) + { + Console.WriteLine($" Confidence: {item.Confidence.Value:F2}"); + } + } + } + } +} +``` + +### Understanding field metadata + +Each extracted field provides metadata to help you understand the extraction quality: + +- **Confidence**: A float value between 0.0 and 1.0 indicating how certain the analyzer is about the extracted value. Higher values indicate higher confidence. Use this to filter or flag low-confidence extractions for manual review. +- **Source**: An encoded identifier that contains bounding box coordinates identifying the position of the field value in the original document. The format is `D(pageNumber, x1, y1, x2, y2, x3, y3, x4, y4)` where: + - `pageNumber`: The page number (1-indexed) where the field was found + - `x1, y1, x2, y2, x3, y3, x4, y4`: The four corner coordinates of the bounding box + - Coordinates are in the document's unit (typically "inch" for US documents, as indicated by `DocumentContent.Unit`) + + For example, a source value like `D(1,1.265,1.0836,2.4972,1.0816,2.4964,1.4117,1.2645,1.4117)` indicates: + - Page 1 + - Bounding box with corners at (1.265, 1.0836), (2.4972, 1.0816), (2.4964, 1.4117), and (1.2645, 1.4117) + - All coordinates are in inches (since `DocumentContent.Unit` is "inch") + + The source can be used to trace back to the exact location where the value was found in the original document. For more information, see the [Source documentation][source-docs]. +- **Spans**: A list of `ContentSpan` objects that indicate the position of the field value in the markdown content. Each span contains: + - `Offset`: The starting position (0-indexed) in characters + - `Length`: The length of the text in characters + +These metadata properties are available on all field types (`StringField`, `NumberField`, `DateField`, `ObjectField`, `ArrayField`, etc.). + +### Document unit + +The `DocumentContent.Unit` property indicates the measurement system used for coordinates in the `Source` field. 
For US documents, this is typically "inch", meaning all bounding box coordinates in the source field are measured in inches. This allows you to precisely locate extracted values in the original document. + +For more details about `DocumentContent` and all available document elements (pages, paragraphs, tables, figures, etc.), see the [Document Elements documentation][document-elements-docs]. + +## Next steps + +- [Sample 04: Create a custom analyzer][sample04] - Learn how to create custom analyzers +- [Sample 05: Create and use a classifier][sample05] - Learn about classifiers + +## Learn more + +- [Content Understanding Documentation][cu-docs] +- [Document Elements Documentation][document-elements-docs] - Detailed information about `DocumentContent` and all available document elements (pages, paragraphs, tables, figures, etc.) +- [Prebuilt Analyzers Documentation][prebuilt-analyzers-docs] - Complete list of 70+ prebuilt analyzers +- [Financial Documents][financial-docs] - Overview of financial document analyzers + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample02]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample02_AnalyzeUrl.md +[sample04]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md +[sample05]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample05_CreateClassifier.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[document-elements-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/document/elements +[prebuilt-analyzers-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/prebuilt-analyzers +[financial-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/prebuilt-analyzers#financial-documents +[source-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/document/elements#source + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md new file mode 100644 index 000000000000..b3371325d2c6 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md @@ -0,0 +1,254 @@ +# Create a custom analyzer + +This sample demonstrates how to create a custom analyzer with a field schema to extract structured data from documents. + +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 00: Configure model deployment defaults][sample00] - Required setup before creating custom analyzers +- [Sample 01: Analyze a document from binary data][sample01] - Basic analysis concepts + +## About custom analyzers + +Custom analyzers allow you to define a field schema that specifies what structured data to extract from documents. 
You can: +- Define custom fields (string, number, date, object, array) +- Specify extraction methods to control how field values are extracted (see [method][method-docs] for details): + - **`extract`** - Values are extracted as they appear in the content (best for literal text extraction from specific locations). Requires `estimateSourceAndConfidence` to be set to `true` for the field. + - **`generate`** - Values are generated freely based on the content using AI models (best for complex or variable fields requiring interpretation) + - **`classify`** - Values are classified against a predefined set of categories (best when using `enum` with a fixed set of possible values) + + When not specified, the system automatically determines the best method based on the field type and description. For more details, see the [Analyzer Reference documentation][analyzer-reference-docs]. +- Use prebuilt analyzers as a base (see [baseAnalyzerId][baseanalyzerid-docs] for details). Supported base analyzers include: + - `prebuilt-document` - for document-based custom analyzers + - `prebuilt-audio` - for audio-based custom analyzers + - `prebuilt-video` - for video-based custom analyzers + - `prebuilt-image` - for image-based custom analyzers + + For the complete and up-to-date list of supported base analyzers, see the [Analyzer Reference documentation][analyzer-reference-docs]. +- Configure analysis options (OCR, layout, formulas) +- Enable source and confidence tracking: Set `estimateFieldSourceAndConfidence` to `true` at the analyzer level (in `ContentAnalyzerConfig`) or `estimateSourceAndConfidence` to `true` at the field level to get source location (page number, bounding box) and confidence scores for extracted field values. This is required for fields with `method` = `extract` and is useful for validation, quality assurance, debugging, and highlighting source text in user interfaces. Field-level settings override analyzer-level settings. For more information, see [estimateSourceAndConfidence][estimate-source-confidence-docs]. + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. 
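For convenience, here is a minimal sketch of client creation with `DefaultAzureCredential` (requires the Azure.Identity package); the endpoint value is a placeholder, and Sample 01 covers the full set of authentication options:

```C#
// Illustrative only: create the client with Microsoft Entra ID authentication.
string endpoint = ""; // your Microsoft Foundry resource endpoint
var client = new ContentUnderstandingClient(new Uri(endpoint), new DefaultAzureCredential());
```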
+ +## Create a custom analyzer + +Create a custom analyzer with a field schema: + +```C# Snippet:ContentUnderstandingCreateAnalyzer +// Generate a unique analyzer ID +string analyzerId = $"my_custom_analyzer_{DateTimeOffset.UtcNow.ToUnixTimeSeconds()}"; + +// Define field schema with custom fields +// This example demonstrates three extraction methods: +// - extract: Literal text extraction (requires estimateSourceAndConfidence) +// - generate: AI-generated values based on content interpretation +// - classify: Classification against predefined categories +var fieldSchema = new ContentFieldSchema( + new Dictionary + { + ["company_name"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Extract, + Description = "Name of the company" + }, + ["total_amount"] = new ContentFieldDefinition + { + Type = ContentFieldType.Number, + Method = GenerationMethod.Extract, + Description = "Total amount on the document" + }, + ["document_summary"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Generate, + Description = "A brief summary of the document content" + }, + ["document_type"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Classify, + Description = "Type of document" + } + }) +{ + Name = "company_schema", + Description = "Schema for extracting company information" +}; + +// Add enum values for the classify field +fieldSchema.Fields["document_type"].Enum.Add("invoice"); +fieldSchema.Fields["document_type"].Enum.Add("receipt"); +fieldSchema.Fields["document_type"].Enum.Add("contract"); +fieldSchema.Fields["document_type"].Enum.Add("report"); +fieldSchema.Fields["document_type"].Enum.Add("other"); + +// Create analyzer configuration +var config = new ContentAnalyzerConfig +{ + EnableFormula = true, + EnableLayout = true, + EnableOcr = true, + EstimateFieldSourceAndConfidence = true, + ReturnDetails = true +}; + +// Create the custom analyzer +var customAnalyzer = new ContentAnalyzer +{ + BaseAnalyzerId = "prebuilt-document", + Description = "Custom analyzer for extracting company information", + Config = config, + FieldSchema = fieldSchema +}; + +// Add model mappings (required for custom analyzers) +customAnalyzer.Models.Add("completion", "gpt-4.1"); +customAnalyzer.Models.Add("embedding", "text-embedding-3-large"); + +// Create the analyzer +var operation = await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + customAnalyzer, + allowReplace: true); + +ContentAnalyzer result = operation.Value; +Console.WriteLine($"Analyzer '{analyzerId}' created successfully!"); +``` + +## Use the custom analyzer + +After creating the analyzer, you can use it to analyze documents. **In production applications, analyzers are typically created once and reused for multiple document analyses.** They persist in your Content Understanding resource until explicitly deleted. 
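Because analyzers persist until deleted, an application that runs repeatedly may want to look an analyzer up before recreating it. The sketch below is illustrative and assumes `GetAnalyzerAsync` throws a `RequestFailedException` with status 404 when the analyzer does not exist, which is the usual Azure SDK convention:

```C#
// Illustrative only: reuse the analyzer if it already exists; otherwise create it
// with the CreateAnalyzerAsync call shown above.
try
{
    ContentAnalyzer existing = (await client.GetAnalyzerAsync(analyzerId)).Value;
    Console.WriteLine($"Reusing existing analyzer '{existing.AnalyzerId}'.");
}
catch (RequestFailedException ex) when (ex.Status == 404)
{
    Console.WriteLine($"Analyzer '{analyzerId}' not found; create it first (see the previous snippet).");
}
```

With the analyzer in place, analyze a document with it: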
+ +```C# Snippet:ContentUnderstandingUseCustomAnalyzer +var documentUrl = new Uri(""); +// Analyze a document using the custom analyzer +var analyzeOperation = await client.AnalyzeAsync( + WaitUntil.Completed, + analyzerId, + inputs: new[] { new AnalyzeInput { Url = documentUrl } }); + +var analyzeResult = analyzeOperation.Value; + +// Extract custom fields from the result +// Since EstimateFieldSourceAndConfidence is enabled, we can access confidence scores and source information +if (analyzeResult.Contents?.FirstOrDefault() is DocumentContent content) +{ + // Extract field (literal text extraction) + if (content.Fields.TryGetValue("company_name", out var companyNameField)) + { + var companyName = companyNameField is StringField sf ? sf.ValueString : null; + Console.WriteLine($"Company Name (extract): {companyName ?? "(not found)"}"); + if (companyNameField != null) + { + Console.WriteLine($" Confidence: {companyNameField.Confidence?.ToString("F2") ?? "N/A"}"); + Console.WriteLine($" Source: {companyNameField.Source ?? "N/A"}"); + if (companyNameField.Spans != null && companyNameField.Spans.Count > 0) + { + var span = companyNameField.Spans[0]; + Console.WriteLine($" Position in markdown: offset={span.Offset}, length={span.Length}"); + } + } + } + + // Extract field (literal text extraction) + if (content.Fields.TryGetValue("total_amount", out var totalAmountField)) + { + var totalAmount = totalAmountField is NumberField nf ? nf.ValueNumber : null; + Console.WriteLine($"Total Amount (extract): {totalAmount?.ToString("F2") ?? "(not found)"}"); + if (totalAmountField != null) + { + Console.WriteLine($" Confidence: {totalAmountField.Confidence?.ToString("F2") ?? "N/A"}"); + Console.WriteLine($" Source: {totalAmountField.Source ?? "N/A"}"); + if (totalAmountField.Spans != null && totalAmountField.Spans.Count > 0) + { + var span = totalAmountField.Spans[0]; + Console.WriteLine($" Position in markdown: offset={span.Offset}, length={span.Length}"); + } + } + } + + // Generate field (AI-generated value) + if (content.Fields.TryGetValue("document_summary", out var summaryField)) + { + var summary = summaryField is StringField sf ? sf.ValueString : null; + Console.WriteLine($"Document Summary (generate): {summary ?? "(not found)"}"); + if (summaryField != null) + { + Console.WriteLine($" Confidence: {summaryField.Confidence?.ToString("F2") ?? "N/A"}"); + // Note: Generated fields may not have source information + if (!string.IsNullOrEmpty(summaryField.Source)) + { + Console.WriteLine($" Source: {summaryField.Source}"); + } + } + } + + // Classify field (classification against predefined categories) + if (content.Fields.TryGetValue("document_type", out var documentTypeField)) + { + var documentType = documentTypeField is StringField sf ? sf.ValueString : null; + Console.WriteLine($"Document Type (classify): {documentType ?? "(not found)"}"); + if (documentTypeField != null) + { + Console.WriteLine($" Confidence: {documentTypeField.Confidence?.ToString("F2") ?? "N/A"}"); + // Note: Classified fields may not have source information + if (!string.IsNullOrEmpty(documentTypeField.Source)) + { + Console.WriteLine($" Source: {documentTypeField.Source}"); + } + } + } +} +``` + +## Delete the analyzer (optional) + +**Note:** In production code, you typically keep analyzers and reuse them for multiple analyses. 
Deletion is mainly useful for: +- Testing and development cleanup +- Removing analyzers that are no longer needed +- Managing resource quotas + +If you need to delete an analyzer (for example, in test cleanup), you can do so as follows: + +```C# Snippet:ContentUnderstandingDeleteCreatedAnalyzer +// Clean up: delete the analyzer (for testing purposes only) +// In production, analyzers are typically kept and reused +await client.DeleteAnalyzerAsync(analyzerId); +Console.WriteLine($"Analyzer '{analyzerId}' deleted successfully."); +``` + +## Next steps + +- [Sample 06: Get analyzer information][sample06] - Learn how to retrieve analyzer details +- [Sample 07: List analyzers][sample07] - Learn how to list all analyzers +- [Sample 08: Update analyzer][sample08] - Learn how to update an existing analyzer +- [Sample 09: Delete analyzer][sample09] - Learn how to delete an analyzer + +## Learn more + +- [Content Understanding Documentation][cu-docs] +- [Analyzer Reference Documentation][analyzer-reference-docs] - Complete reference for analyzer configuration, extraction methods, and field schemas +- [baseAnalyzerId][baseanalyzerid-docs] - Learn about supported base analyzers for custom analyzers +- [method][method-docs] - Learn about extraction methods (extract, generate, classify) +- [estimateSourceAndConfidence][estimate-source-confidence-docs] - Learn about source location and confidence score tracking for extracted fields + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample06]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample06_GetAnalyzer.md +[sample07]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample07_ListAnalyzers.md +[sample08]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample08_UpdateAnalyzer.md +[sample09]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample09_DeleteAnalyzer.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[analyzer-reference-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/analyzer-reference +[baseanalyzerid-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/analyzer-reference#baseanalyzerid +[method-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/analyzer-reference#method +[estimate-source-confidence-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/analyzer-reference#estimatesourceandconfidence + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample05_CreateClassifier.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample05_CreateClassifier.md new file mode 100644 index 000000000000..ed3ac065ee0b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample05_CreateClassifier.md @@ -0,0 +1,210 @@ +# Create and use a classifier + +This sample demonstrates how to create a classifier analyzer to categorize documents and use it to analyze documents with and without 
automatic segmentation. + +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 00: Configure model deployment defaults][sample00] - Required setup before creating custom analyzers +- [Sample 04: Create a custom analyzer][sample04] - Basic custom analyzer concepts + +## About classifiers + +Classifiers are a type of custom analyzer that categorize documents into predefined categories. They're useful for: +- **Document routing**: Automatically route documents to the right processing pipeline based on category +- **Content organization**: Organize large document collections by type +- **Multi-document processing**: Process files containing multiple document types by automatically segmenting them + +Classifiers use **content categories** to define the types of documents they can identify. Each category has a description that helps the analyzer understand what documents belong to that category. + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. + +## Create a classifier + +Create a classifier analyzer with content categories: + +```C# Snippet:ContentUnderstandingCreateClassifier +// Define content categories for classification +var categories = new Dictionary +{ + ["Loan_Application"] = new ContentCategory + { + Description = "Documents submitted by individuals or businesses to request funding, typically including personal or business details, financial history, loan amount, purpose, and supporting documentation." + }, + ["Invoice"] = new ContentCategory + { + Description = "Billing documents issued by sellers or service providers to request payment for goods or services, detailing items, prices, taxes, totals, and payment terms." + }, + ["Bank_Statement"] = new ContentCategory + { + Description = "Official statements issued by banks that summarize account activity over a period, including deposits, withdrawals, fees, and balances." + } +}; + +// Create analyzer configuration +var config = new ContentAnalyzerConfig +{ + ReturnDetails = true, + EnableSegment = true // Enable automatic segmentation by category +}; + +// Add categories to config +foreach (var kvp in categories) +{ + config.ContentCategories.Add(kvp.Key, kvp.Value); +} + +// Create the classifier analyzer +var classifier = new ContentAnalyzer +{ + BaseAnalyzerId = "prebuilt-document", + Description = "Custom classifier for financial document categorization", + Config = config +}; +classifier.Models.Add("completion", "gpt-4.1"); + +// Create the classifier +string analyzerId = $"my_classifier_{DateTimeOffset.UtcNow.ToUnixTimeSeconds()}"; +var operation = await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + classifier); + +ContentAnalyzer result = operation.Value; +Console.WriteLine($"Classifier '{analyzerId}' created successfully!"); +``` + +## Analyze documents without segmentation + +When `EnableSegment` is `false`, the entire document is classified as a single unit without splitting. 
For example, consider a multi-page PDF file like [`mixed_financial_docs.pdf`][mixed-docs-example] that contains: +- **Invoice**: page 1 +- **Bank Statement**: pages 2-3 +- **Loan Application**: page 4 + +With `EnableSegment = false`, the entire 4-page document will be classified as one category (e.g., "Invoice" or "Bank Statement") without splitting the document into separate segments: + +```C# Snippet:ContentUnderstandingAnalyzeCategory +// Analyze a document (EnableSegment=false means entire document is one category) +string filePath = ""; +byte[] fileBytes = File.ReadAllBytes(filePath); +AnalyzeResultOperation analyzeOperation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + analyzerId, + "application/pdf", + BinaryData.FromBytes(fileBytes)); + +var analyzeResult = analyzeOperation.Value; + +// Display classification results +if (analyzeResult.Contents?.FirstOrDefault() is DocumentContent docContent) +{ + Console.WriteLine($"Pages: {docContent.StartPageNumber}-{docContent.EndPageNumber}"); + + // With EnableSegment=false, the document is classified as a single unit + if (docContent.Segments != null && docContent.Segments.Count > 0) + { + foreach (var segment in docContent.Segments) + { + Console.WriteLine($"Category: {segment.Category ?? "(unknown)"}"); + Console.WriteLine($"Pages: {segment.StartPageNumber}-{segment.EndPageNumber}"); + } + } +} +``` + +## Analyze documents with segmentation + +When `EnableSegment` is `true`, the analyzer automatically splits multi-document files into segments by category. For example, with [`mixed_financial_docs.pdf`][mixed-docs-example] that contains: +- **Invoice**: page 1 +- **Bank Statement**: pages 2-3 +- **Loan Application**: page 4 + +With `EnableSegment = true`, the analyzer will segment the document and return classification for each segment: +- Segment 1: Category "Invoice", Pages 1-1 +- Segment 2: Category "Bank Statement", Pages 2-3 +- Segment 3: Category "Loan Application", Page 4 + +```C# Snippet:ContentUnderstandingAnalyzeCategoryWithSegments +// Analyze a document (EnableSegment=true automatically segments by category) +string filePath = ""; +byte[] fileBytes = File.ReadAllBytes(filePath); +AnalyzeResultOperation analyzeOperation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + analyzerId, + "application/pdf", + BinaryData.FromBytes(fileBytes)); + +var analyzeResult = analyzeOperation.Value; + +// Display classification results with automatic segmentation +if (analyzeResult.Contents?.FirstOrDefault() is DocumentContent docContent) +{ + if (docContent.Segments != null && docContent.Segments.Count > 0) + { + Console.WriteLine($"Found {docContent.Segments.Count} segment(s):"); + foreach (var segment in docContent.Segments) + { + Console.WriteLine($" Category: {segment.Category ?? "(unknown)"}"); + Console.WriteLine($" Pages: {segment.StartPageNumber}-{segment.EndPageNumber}"); + Console.WriteLine($" Segment ID: {segment.SegmentId ?? "(not available)"}"); + } + } +} +``` + +## Segmentation behavior + +The `EnableSegment` property controls how multi-document files are processed: + +- **`EnableSegment = false`**: The entire document is classified as one category without splitting. For example, with [`mixed_financial_docs.pdf`][mixed-docs-example] (4 pages containing invoice, bank statement, and loan application), the entire document will be classified as a single category. Useful when you know each file contains only one document type. 
+ +- **`EnableSegment = true`**: The analyzer automatically splits the document into segments, with each segment having its own category. For example, with [`mixed_financial_docs.pdf`][mixed-docs-example], the analyzer will return three segments: + - Segment 1: "Invoice" (page 1) + - Segment 2: "Bank Statement" (pages 2-3) + - Segment 3: "Loan Application" (page 4) + + Useful for processing files that contain multiple document types. + +## Delete the classifier (optional) + +**Note:** In production code, you typically keep classifiers and reuse them for multiple analyses. Deletion is mainly useful for: +- Testing and development cleanup +- Removing classifiers that are no longer needed +- Managing resource quotas + +If you need to delete a classifier (for example, in test cleanup), you can do so as follows: + +```C# Snippet:ContentUnderstandingDeleteClassifier +// Clean up: delete the classifier (for testing purposes only) +// In production, classifiers are typically kept and reused +await client.DeleteAnalyzerAsync(analyzerId); +Console.WriteLine($"Classifier '{analyzerId}' deleted successfully."); +``` + +## Next steps + +- [Sample 06: Get analyzer information][sample06] - Learn how to retrieve analyzer details +- [Sample 07: List analyzers][sample07] - Learn how to list all analyzers +- [Sample 08: Update analyzer][sample08] - Learn how to update an existing analyzer + +## Learn more + +- [Content Understanding Documentation][cu-docs] +- [Classifiers Documentation][classifier-docs] + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample04]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md +[sample06]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample06_GetAnalyzer.md +[sample07]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample07_ListAnalyzers.md +[sample08]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample08_UpdateAnalyzer.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[classifier-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/classifier +[mixed-docs-example]: https://github.com/Azure-Samples/azure-ai-content-understanding-dotnet/blob/main/ContentUnderstanding.Common/data/mixed_financial_docs.pdf + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample06_GetAnalyzer.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample06_GetAnalyzer.md new file mode 100644 index 000000000000..67033d5c9038 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample06_GetAnalyzer.md @@ -0,0 +1,157 @@ +# Get analyzer information + +This sample demonstrates how to retrieve information about analyzers, including prebuilt analyzers and custom analyzers. 
+ +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 04: Create a custom analyzer][sample04] - Understanding custom analyzers +- [Sample 05: Create and use a classifier][sample05] - Understanding classifiers + +## About getting analyzer information + +The `GetAnalyzerAsync` method allows you to retrieve detailed information about any analyzer, including: +- **Prebuilt analyzers**: System-provided analyzers like `prebuilt-documentSearch`, `prebuilt-invoice`, etc. +- **Custom analyzers**: Analyzers you've created with custom field schemas or classifiers + +This is useful for: +- **Verifying analyzer configuration**: Check the current state of an analyzer +- **Inspecting prebuilt analyzers**: Learn about available prebuilt analyzers and their capabilities +- **Debugging**: Understand why an analyzer behaves a certain way + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. + +## Get prebuilt analyzer information + +Retrieve information about a prebuilt analyzer and display the full JSON: + +```C# Snippet:ContentUnderstandingGetPrebuiltAnalyzer +// Get information about a prebuilt analyzer +var response = await client.GetAnalyzerAsync("prebuilt-documentSearch"); +ContentAnalyzer analyzer = response.Value; + +// Display full analyzer JSON +var jsonOptions = new JsonSerializerOptions +{ + WriteIndented = true, + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull +}; +string analyzerJson = JsonSerializer.Serialize(analyzer, jsonOptions); +Console.WriteLine("Prebuilt-documentSearch Analyzer:"); +Console.WriteLine(analyzerJson); +``` + +You can also get information about other prebuilt analyzers, such as `prebuilt-invoice`: + +```C# Snippet:ContentUnderstandingGetPrebuiltInvoice +// Get information about prebuilt-invoice analyzer +var invoiceResponse = await client.GetAnalyzerAsync("prebuilt-invoice"); +ContentAnalyzer invoiceAnalyzer = invoiceResponse.Value; + +// Display full analyzer JSON +var jsonOptions = new JsonSerializerOptions +{ + WriteIndented = true, + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull +}; +string invoiceAnalyzerJson = JsonSerializer.Serialize(invoiceAnalyzer, jsonOptions); +Console.WriteLine("Prebuilt-invoice Analyzer:"); +Console.WriteLine(invoiceAnalyzerJson); +``` + +## Get custom analyzer information + +Create a custom analyzer, retrieve its information, and display the full JSON: + +```C# Snippet:ContentUnderstandingGetCustomAnalyzer +string endpoint = ""; +string apiKey = ""; // Set to null to use DefaultAzureCredential +var client = !string.IsNullOrEmpty(apiKey) + ? 
new ContentUnderstandingClient(new Uri(endpoint), new AzureKeyCredential(apiKey)) + : new ContentUnderstandingClient(new Uri(endpoint), new DefaultAzureCredential()); + +// Generate a unique analyzer ID +string analyzerId = $"my_custom_analyzer_{DateTimeOffset.UtcNow.ToUnixTimeSeconds()}"; + +// Define field schema with custom fields +var fieldSchema = new ContentFieldSchema( + new Dictionary + { + ["company_name"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Extract, + Description = "Name of the company" + } + }) +{ + Name = "test_schema", + Description = "Test schema for GetAnalyzer sample" +}; + +// Create analyzer configuration +var config = new ContentAnalyzerConfig +{ + ReturnDetails = true +}; + +// Create the custom analyzer +var analyzer = new ContentAnalyzer +{ + BaseAnalyzerId = "prebuilt-document", + Description = "Test analyzer for GetAnalyzer sample", + Config = config, + FieldSchema = fieldSchema +}; +analyzer.Models.Add("completion", "gpt-4.1"); + +// Create the analyzer +await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + analyzer); + +try +{ + // Get information about the custom analyzer + var response = await client.GetAnalyzerAsync(analyzerId); + ContentAnalyzer retrievedAnalyzer = response.Value; + + // Display full analyzer JSON + var jsonOptions = new JsonSerializerOptions + { + WriteIndented = true, + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull + }; + string analyzerJson = JsonSerializer.Serialize(retrievedAnalyzer, jsonOptions); + Console.WriteLine("Custom Analyzer:"); + Console.WriteLine(analyzerJson); +``` + +## Next steps + +- [Sample 07: List analyzers][sample07] - Learn how to list all analyzers +- [Sample 08: Update analyzer][sample08] - Learn how to update an existing analyzer +- [Sample 09: Delete analyzer][sample09] - Learn how to delete an analyzer + +## Learn more + +- [Content Understanding Documentation][cu-docs] +- [Prebuilt Analyzers Documentation][prebuilt-docs] + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample04]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md +[sample05]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample05_CreateClassifier.md +[sample07]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample07_ListAnalyzers.md +[sample08]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample08_UpdateAnalyzer.md +[sample09]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample09_DeleteAnalyzer.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[prebuilt-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/prebuilt-analyzers + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample07_ListAnalyzers.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample07_ListAnalyzers.md new file 
mode 100644 index 000000000000..a1b7b475fd3f --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample07_ListAnalyzers.md @@ -0,0 +1,91 @@ +# List all analyzers + +This sample demonstrates how to list all available analyzers in your Microsoft Foundry resource, including both prebuilt and custom analyzers. + +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 04: Create a custom analyzer][sample04] - Understanding custom analyzers +- [Sample 06: Get analyzer information][sample06] - Understanding analyzer details + +## About listing analyzers + +The `GetAnalyzersAsync` method returns an async enumerable of all analyzers in your resource, including: +- **Prebuilt analyzers**: System-provided analyzers like `prebuilt-documentSearch`, `prebuilt-invoice`, etc. +- **Custom analyzers**: Analyzers you've created + +This is useful for: +- **Discovery**: See what analyzers are available in your resource +- **Management**: Get an overview of all your custom analyzers +- **Debugging**: Verify that analyzers were created successfully + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. + +## List all analyzers + +Iterate through all available analyzers: + +```C# Snippet:ContentUnderstandingListAnalyzers +// List all analyzers +var analyzers = new List(); +await foreach (var analyzer in client.GetAnalyzersAsync()) +{ + analyzers.Add(analyzer); +} + +Console.WriteLine($"Found {analyzers.Count} analyzer(s)"); + +// Display summary +var prebuiltCount = analyzers.Count(a => a.AnalyzerId?.StartsWith("prebuilt-") == true); +var customCount = analyzers.Count(a => a.AnalyzerId?.StartsWith("prebuilt-") != true); +Console.WriteLine($" Prebuilt analyzers: {prebuiltCount}"); +Console.WriteLine($" Custom analyzers: {customCount}"); + +// Display details for each analyzer +foreach (var analyzer in analyzers) +{ + Console.WriteLine($" ID: {analyzer.AnalyzerId}"); + Console.WriteLine($" Description: {analyzer.Description ?? 
"(none)"}"); + Console.WriteLine($" Status: {analyzer.Status}"); + + if (analyzer.AnalyzerId?.StartsWith("prebuilt-") == true) + { + Console.WriteLine(" Type: Prebuilt analyzer"); + } + else + { + Console.WriteLine(" Type: Custom analyzer"); + } +} +``` + +## Next steps + +- [Sample 08: Update analyzer][sample08] - Learn how to update an existing analyzer +- [Sample 09: Delete analyzer][sample09] - Learn how to delete an analyzer + +## Learn more + +- [Content Understanding Documentation][cu-docs] +- [Prebuilt Analyzers Documentation][prebuilt-docs] + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample04]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md +[sample06]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample06_GetAnalyzer.md +[sample08]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample08_UpdateAnalyzer.md +[sample09]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample09_DeleteAnalyzer.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[prebuilt-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/prebuilt-analyzers + + + + + + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample08_UpdateAnalyzer.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample08_UpdateAnalyzer.md new file mode 100644 index 000000000000..695681d9952b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample08_UpdateAnalyzer.md @@ -0,0 +1,80 @@ +# Update an analyzer + +This sample demonstrates how to update an existing custom analyzer, including updating its description and tags. + +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 04: Create a custom analyzer][sample04] - Understanding custom analyzers +- [Sample 06: Get analyzer information][sample06] - Understanding analyzer details + +## About updating analyzers + +The `UpdateAnalyzerAsync` method allows you to modify certain properties of an existing analyzer: +- **Description**: Update the analyzer's description +- **Tags**: Add, update, or remove tags (set tag value to empty string to remove) + +**Note**: Not all analyzer properties can be updated. Field schemas, models, and configuration typically cannot be changed after creation. To change these, you may need to delete and recreate the analyzer. + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. 
+ +## Update an analyzer + +Update an analyzer's description and tags: + +```C# Snippet:ContentUnderstandingUpdateAnalyzer +// First, get the current analyzer to preserve base analyzer ID +var currentAnalyzer = await client.GetAnalyzerAsync(analyzerId); + +// Display current analyzer information +Console.WriteLine("Current analyzer information:"); +Console.WriteLine($" Description: {currentAnalyzer.Value.Description}"); +Console.WriteLine($" Tags: {string.Join(", ", currentAnalyzer.Value.Tags.Select(kvp => $"{kvp.Key}={kvp.Value}"))}"); + +// Create an updated analyzer with new description and tags +var updatedAnalyzer = new ContentAnalyzer +{ + BaseAnalyzerId = currentAnalyzer.Value.BaseAnalyzerId, + Description = "Updated description" +}; + +// Update tags (empty string removes a tag) +updatedAnalyzer.Tags["tag1"] = "tag1_updated_value"; +updatedAnalyzer.Tags["tag2"] = ""; // Remove tag2 +updatedAnalyzer.Tags["tag3"] = "tag3_value"; // Add tag3 + +// Update the analyzer +await client.UpdateAnalyzerAsync(analyzerId, updatedAnalyzer); + +// Verify the update +var updated = await client.GetAnalyzerAsync(analyzerId); +Console.WriteLine($"Description: {updated.Value.Description}"); +Console.WriteLine($"Tags: {string.Join(", ", updated.Value.Tags.Select(kvp => $"{kvp.Key}={kvp.Value}"))}"); +``` + +## Next steps + +- [Sample 09: Delete analyzer][sample09] - Learn how to delete an analyzer + +## Learn more + +- [Content Understanding Documentation][cu-docs] + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample04]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md +[sample06]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample06_GetAnalyzer.md +[sample09]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample09_DeleteAnalyzer.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ + + + + + + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample09_DeleteAnalyzer.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample09_DeleteAnalyzer.md new file mode 100644 index 000000000000..31259d5fa19b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample09_DeleteAnalyzer.md @@ -0,0 +1,86 @@ +# Delete an analyzer + +This sample demonstrates how to delete a custom analyzer. + +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 04: Create a custom analyzer][sample04] - Understanding custom analyzers +- [Sample 08: Update analyzer][sample08] - Understanding analyzer management + +## About deleting analyzers + +The `DeleteAnalyzerAsync` method permanently removes a custom analyzer from your resource. This operation cannot be undone. + +**Important notes**: +- Only custom analyzers can be deleted. Prebuilt analyzers cannot be deleted. +- Deleting an analyzer does not delete analysis results that were created using that analyzer. +- Once deleted, the analyzer ID cannot be reused immediately. 
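As the notes above call out, only custom analyzers can be deleted. A simple guard, sketched here for illustration, avoids accidental attempts to delete prebuilt analyzers (the `prebuilt-` prefix check mirrors the convention used in Sample 07):

```C#
// Illustrative only: skip deletion for prebuilt analyzers.
if (analyzerId.StartsWith("prebuilt-", StringComparison.OrdinalIgnoreCase))
{
    Console.WriteLine($"'{analyzerId}' is a prebuilt analyzer and cannot be deleted.");
}
else
{
    await client.DeleteAnalyzerAsync(analyzerId);
    Console.WriteLine($"Analyzer '{analyzerId}' deleted successfully.");
}
```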
+ +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. + +## Create a simple analyzer + +First, create a simple analyzer that we'll delete: + +```C# Snippet:ContentUnderstandingCreateSimpleAnalyzer +// First create a simple analyzer to delete +// Generate a unique analyzer ID +string analyzerId = $"my_analyzer_{DateTimeOffset.UtcNow.ToUnixTimeSeconds()}"; + +// Create a simple analyzer +var analyzer = new ContentAnalyzer +{ + BaseAnalyzerId = "prebuilt-document", + Description = "Simple analyzer for deletion example", + Config = new ContentAnalyzerConfig + { + ReturnDetails = true + } +}; +analyzer.Models.Add("completion", "gpt-4.1"); + +await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + analyzer, + allowReplace: true); + +Console.WriteLine($"Analyzer '{analyzerId}' created successfully."); +``` + +## Delete an analyzer + +Delete the custom analyzer: + +```C# Snippet:ContentUnderstandingDeleteAnalyzer + // Delete an analyzer +await client.DeleteAnalyzerAsync(analyzerId); +Console.WriteLine($"Analyzer '{analyzerId}' deleted successfully."); +``` + +## Next steps + +You've completed the analyzer management samples! Consider exploring: +- [Sample 01: Analyze binary][sample01] - Analyze documents from files +- [Sample 02: Analyze URL][sample02] - Analyze documents from URLs +- [Sample 03: Analyze invoice][sample03] - Use prebuilt analyzers + +## Learn more + +- [Content Understanding Documentation][cu-docs] + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample02]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample02_AnalyzeUrl.md +[sample03]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample03_AnalyzeInvoice.md +[sample04]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md +[sample08]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample08_UpdateAnalyzer.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample10_AnalyzeConfigs.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample10_AnalyzeConfigs.md new file mode 100644 index 000000000000..00a97f2a83eb --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample10_AnalyzeConfigs.md @@ -0,0 +1,188 @@ +# Analyze documents with configs + +This sample demonstrates how to extract additional features from documents such as charts, hyperlinks, formulas, and annotations using the `prebuilt-documentSearch` analyzer, which has formulas, layout, and OCR enabled by default. 
+ +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 01: Analyze a document from binary data][sample01] - Basic analysis concepts + +## About analysis configs + +The `prebuilt-documentSearch` analyzer has the following configurations enabled by default: +- **EnableFormula**: Extracts mathematical formulas from documents +- **EnableLayout**: Extracts layout information (tables, figures, etc.) +- **EnableOcr**: Performs OCR on documents + +These configs enable extraction of: +- **Charts**: Chart figures with Chart.js configuration +- **Hyperlinks**: URLs and links found in the document +- **Formulas**: Mathematical formulas in LaTeX format +- **Annotations**: PDF annotations, comments, and markup + +For custom analyzers, you can configure these options in `ContentAnalyzerConfig` when creating the analyzer. + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. + +## Analyze with configs + +Analyze a document using `prebuilt-documentSearch` which has formulas, layout, and OCR enabled: + +```C# Snippet:ContentUnderstandingAnalyzeWithConfigs +string filePath = ""; +byte[] fileBytes = File.ReadAllBytes(filePath); +BinaryData binaryData = BinaryData.FromBytes(fileBytes); + +// Analyze with prebuilt-documentSearch which has formulas, layout, and OCR enabled +// These configs enable extraction of charts, annotations, hyperlinks, and formulas +AnalyzeResultOperation operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + binaryData); + +AnalyzeResult result = operation.Value; +``` + +## Extract charts + +Extract chart figures from the document: + +```C# Snippet:ContentUnderstandingExtractCharts +// Extract charts from document content +if (result.Contents?.FirstOrDefault() is DocumentContent documentContent) +{ + if (documentContent.Figures != null && documentContent.Figures.Count > 0) + { + var chartFigures = documentContent.Figures + .Where(f => f is DocumentChartFigure) + .Cast() + .ToList(); + + Console.WriteLine($"Found {chartFigures.Count} chart(s)"); + foreach (var chart in chartFigures) + { + Console.WriteLine($" Chart ID: {chart.Id}"); + if (!string.IsNullOrEmpty(chart.Description)) + { + Console.WriteLine($" Description: {chart.Description}"); + } + if (chart.Caption != null && !string.IsNullOrEmpty(chart.Caption.Content)) + { + Console.WriteLine($" Caption: {chart.Caption.Content}"); + } + } + } +} +``` + +## Extract hyperlinks + +Extract hyperlinks from the document: + +```C# Snippet:ContentUnderstandingExtractHyperlinks +// Extract hyperlinks from document content +if (result.Contents?.FirstOrDefault() is DocumentContent docContent) +{ + if (docContent.Hyperlinks != null && docContent.Hyperlinks.Count > 0) + { + Console.WriteLine($"Found {docContent.Hyperlinks.Count} hyperlink(s)"); + foreach (var hyperlink in docContent.Hyperlinks) + { + Console.WriteLine($" URL: {hyperlink.Url ?? "(not available)"}"); + Console.WriteLine($" Content: {hyperlink.Content ?? 
"(not available)"}"); + } + } +} +``` + +## Extract formulas + +Extract mathematical formulas from document pages: + +```C# Snippet:ContentUnderstandingExtractFormulas +// Extract formulas from document pages +if (result.Contents?.FirstOrDefault() is DocumentContent content) +{ + var allFormulas = new System.Collections.Generic.List(); + if (content.Pages != null) + { + foreach (var page in content.Pages) + { + if (page.Formulas != null) + { + allFormulas.AddRange(page.Formulas); + } + } + } + + if (allFormulas.Count > 0) + { + Console.WriteLine($"Found {allFormulas.Count} formula(s)"); + foreach (var formula in allFormulas) + { + Console.WriteLine($" Formula Kind: {formula.Kind}"); + Console.WriteLine($" LaTeX: {formula.Value ?? "(not available)"}"); + if (formula.Confidence.HasValue) + { + Console.WriteLine($" Confidence: {formula.Confidence.Value:F2}"); + } + } + } +} +``` + +## Extract annotations + +Extract PDF annotations from the document: + +```C# Snippet:ContentUnderstandingExtractAnnotations +// Extract annotations from document content +if (result.Contents?.FirstOrDefault() is DocumentContent document) +{ + if (document.Annotations != null && document.Annotations.Count > 0) + { + Console.WriteLine($"Found {document.Annotations.Count} annotation(s)"); + foreach (var annotation in document.Annotations) + { + Console.WriteLine($" Annotation ID: {annotation.Id}"); + Console.WriteLine($" Kind: {annotation.Kind}"); + if (!string.IsNullOrEmpty(annotation.Author)) + { + Console.WriteLine($" Author: {annotation.Author}"); + } + if (annotation.Comments != null && annotation.Comments.Count > 0) + { + Console.WriteLine($" Comments: {annotation.Comments.Count}"); + foreach (var comment in annotation.Comments) + { + Console.WriteLine($" - {comment.Message}"); + } + } + } + } +} +``` + +## Next steps + +- [Sample 04: Create a custom analyzer][sample04] - Learn how to configure analysis options for custom analyzers +- [Sample 01: Analyze binary][sample01] - Learn more about basic document analysis + +## Learn more + +- [Content Understanding Documentation][cu-docs] +- [Document Elements Documentation][document-elements-docs] - Detailed information about document elements (pages, figures, annotations, etc.) + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample04]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[document-elements-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/document/elements + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample11_AnalyzeReturnRawJson.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample11_AnalyzeReturnRawJson.md new file mode 100644 index 000000000000..968943bcb027 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample11_AnalyzeReturnRawJson.md @@ -0,0 +1,174 @@ +# Return raw JSON from analysis + +This sample demonstrates how to access the raw JSON response from analysis operations using protocol methods. This is useful for advanced scenarios where you need direct access to the JSON structure. 
+ +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 01: Analyze a document from binary data][sample01] - Basic analysis concepts + +## About raw JSON responses + +The Content Understanding SDK provides two approaches for accessing analysis results: + +1. **Object model approach** (recommended): Returns strongly-typed `AnalyzeResult` objects that are easier to navigate and use. This is shown in [Sample 01][sample01]. + +2. **Protocol method approach**: Returns raw `BinaryData` containing the JSON response. This sample demonstrates this approach for advanced scenarios. + +**Important**: For production use, prefer the object model approach as it provides: +- Type safety +- IntelliSense support +- Easier navigation of results +- Better error handling + +Use raw JSON only when you need: +- Custom JSON processing +- Direct access to the raw response structure +- Integration with custom JSON parsers + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. + +## Analyze and return raw JSON + +Use the protocol method to get raw JSON response: + +```C# Snippet:ContentUnderstandingAnalyzeReturnRawJson +string filePath = ""; +byte[] fileBytes = File.ReadAllBytes(filePath); + +// Use protocol method to get raw JSON response +// Note: For production use, prefer the object model approach (AnalyzeBinaryAsync with BinaryData) +// which returns AnalyzeResult objects that are easier to work with +var operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + RequestContent.Create(BinaryData.FromBytes(fileBytes))); + +BinaryData responseData = operation.Value; +``` + +## Parse raw JSON + +Parse and format the raw JSON response: + +```C# Snippet:ContentUnderstandingParseRawJson +// Parse the raw JSON response +using var jsonDocument = JsonDocument.Parse(responseData); + +// Pretty-print the JSON +string prettyJson = JsonSerializer.Serialize( + jsonDocument.RootElement, + new JsonSerializerOptions { WriteIndented = true }); + +// Create output directory if it doesn't exist +string outputDir = Path.Combine(AppContext.BaseDirectory, "sample_output"); +Directory.CreateDirectory(outputDir); + +// Save to file +string outputFileName = $"analyze_result_{DateTime.UtcNow:yyyyMMdd_HHmmss}.json"; +string outputPath = Path.Combine(outputDir, outputFileName); +File.WriteAllText(outputPath, prettyJson); + +Console.WriteLine($"Raw JSON response saved to: {outputPath}"); +Console.WriteLine($"File size: {prettyJson.Length:N0} characters"); +``` + +## Comparing approaches: Raw JSON vs object model + +The following comparison highlights the difference between the protocol method (raw JSON) and the object model approach: + +### Protocol method (raw JSON) + +```csharp +// Get raw JSON response +var operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + RequestContent.Create(BinaryData.FromBytes(fileBytes))); + +BinaryData responseData = operation.Value; + +// Parse JSON manually +using var jsonDocument = JsonDocument.Parse(responseData); +var resultElement = jsonDocument.RootElement.GetProperty("result"); +var analyzerId = resultElement.GetProperty("analyzerId").GetString(); +``` + +### Object model 
(recommended) + +```csharp +// Get strongly-typed result +var operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + BinaryData.FromBytes(fileBytes)); + +AnalyzeResult result = operation.Value; + +// Access properties directly +string analyzerId = result.AnalyzerId; +var contents = result.Contents; +``` + +**Key differences:** +- **Raw JSON**: Requires manual JSON parsing, no type safety, more verbose +- **Object Model**: Strongly-typed properties, IntelliSense support, cleaner code + +## Extract information from raw JSON + +Extract key information from the parsed JSON: + +```C# Snippet:ContentUnderstandingExtractFromRawJson +// Extract key information from raw JSON +var resultElement = jsonDocument.RootElement.GetProperty("result"); + +if (resultElement.TryGetProperty("analyzerId", out var analyzerIdElement)) +{ + Console.WriteLine($"Analyzer ID: {analyzerIdElement.GetString()}"); +} + +if (resultElement.TryGetProperty("contents", out var contentsElement) && + contentsElement.ValueKind == JsonValueKind.Array) +{ + Console.WriteLine($"Contents count: {contentsElement.GetArrayLength()}"); + + if (contentsElement.GetArrayLength() > 0) + { + var firstContent = contentsElement[0]; + if (firstContent.TryGetProperty("kind", out var kindElement)) + { + Console.WriteLine($"Content kind: {kindElement.GetString()}"); + } + if (firstContent.TryGetProperty("mimeType", out var mimeTypeElement)) + { + Console.WriteLine($"MIME type: {mimeTypeElement.GetString()}"); + } + } +} +``` + +## Next steps + +- [Sample 01: Analyze binary][sample01] - Learn the recommended object model approach +- [Sample 10: Analyze configs][sample10] - Learn about extracting features from results + +## Learn more + +- [Content Understanding Documentation][cu-docs] +- [Protocol Methods][protocol-methods-docs] - Learn about protocol methods in Azure SDKs + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample10]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample10_AnalyzeConfigs.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[protocol-methods-docs]: https://aka.ms/azsdk/net/protocol-methods + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample12_GetResultFile.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample12_GetResultFile.md new file mode 100644 index 000000000000..5b2f3bc1c533 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample12_GetResultFile.md @@ -0,0 +1,124 @@ +# Get result files from analysis + +This sample demonstrates how to retrieve result files (such as keyframe images) from a video analysis operation using the `GetResultFile` API. 
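+In short, the call takes an operation ID and a result-file path. The sketch below shows only the call shape; it assumes an `operationId` from a completed video analysis, a keyframe at 1000 ms, and a `BinaryData` payload, and the full end-to-end example later in this sample shows how those values are obtained:
+
+```csharp
+// Illustrative only: retrieve the keyframe captured at 1000 ms and save it locally.
+Response<BinaryData> keyframe = await client.GetResultFileAsync(operationId, "keyframes/1000");
+File.WriteAllBytes("keyframe_1000.jpg", keyframe.Value.ToArray());
+```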
+ +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 01: Analyze a document from binary data][sample01] - Basic analysis concepts + +## About result files + +When analyzing video content, the Content Understanding service can generate result files such as: +- **Keyframe images**: Extracted frames from the video at specific timestamps +- **Other result files**: Additional files generated during analysis + +The `GetResultFile` API allows you to retrieve these files using: +- **Operation ID**: Extracted from the analysis operation +- **File path**: The path to the specific result file (e.g., `"keyframes/{frameTimeMs}"` for keyframe images) + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. + +## Analyze video for result files + +Analyze a video to generate result files: + +```C# Snippet:ContentUnderstandingAnalyzeVideoForResultFiles +Uri videoUrl = new Uri(""); +// Start the analysis operation +var analyzeOperation = await client.AnalyzeAsync( + WaitUntil.Started, + "prebuilt-videoSearch", + inputs: new[] { new AnalyzeInput { Url = videoUrl } }); + +// Get the operation ID from the operation (available after Started) +string operationId = analyzeOperation.Id; +Console.WriteLine($"Operation ID: {operationId}"); + +// Wait for completion +await analyzeOperation.WaitForCompletionAsync(); + +AnalyzeResult result = analyzeOperation.Value; +``` + +## Get result file + +Retrieve a result file (keyframe image) using the operation ID and file path: + +```C# Snippet:ContentUnderstandingGetResultFile +// GetResultFile is used to retrieve result files (like keyframe images) from video analysis +// The path format is: "keyframes/{frameTimeMs}" where frameTimeMs is the timestamp in milliseconds + +// Example: Get a keyframe image (if available) +// Note: This example demonstrates the API pattern. In production, you would: +// 1. Analyze a video to get keyframe timestamps +// 2. Use those timestamps to construct paths like "keyframes/1000" for the frame at 1000ms +// 3. 
Call GetResultFileAsync with the operation ID and path + +// For video analysis, keyframes would be found in AudioVisualContent.KeyFrameTimesMs +var videoContent = result.Contents?.FirstOrDefault(c => c is AudioVisualContent) as AudioVisualContent; + +if (videoContent?.KeyFrameTimesMs != null && videoContent.KeyFrameTimesMs.Count > 0) +{ + // Print keyframe information + int totalKeyframes = videoContent.KeyFrameTimesMs.Count; + long firstFrameTimeMs = videoContent.KeyFrameTimesMs[0]; + Console.WriteLine($"Total keyframes: {totalKeyframes}"); + Console.WriteLine($"First keyframe time: {firstFrameTimeMs} ms"); + + // Get the first keyframe as an example + string framePath = $"keyframes/{firstFrameTimeMs}"; + + Console.WriteLine($"Getting result file: {framePath}"); + + // Get the result file (keyframe image) + Response fileResponse = await client.GetResultFileAsync( + operationId, + framePath); + + byte[] imageBytes = fileResponse.Value.ToArray(); + Console.WriteLine($"Retrieved keyframe image ({imageBytes.Length:N0} bytes)"); + + // Save the keyframe image to sample_output directory + string outputDir = Path.Combine(AppContext.BaseDirectory, "sample_output"); + Directory.CreateDirectory(outputDir); + string outputFileName = $"keyframe_{firstFrameTimeMs}.jpg"; + string outputPath = Path.Combine(outputDir, outputFileName); + File.WriteAllBytes(outputPath, imageBytes); + + Console.WriteLine($"Keyframe image saved to: {outputPath}"); +} +else +{ + Console.WriteLine("Note: This sample demonstrates GetResultFile API usage."); + Console.WriteLine(" For video analysis with keyframes, use prebuilt-videoSearch analyzer."); + Console.WriteLine(" Keyframes are available in AudioVisualContent.KeyFrameTimesMs."); + Console.WriteLine(); + Console.WriteLine($"Example usage with operation ID '{operationId}':"); + Console.WriteLine(" Response fileResponse = await client.GetResultFileAsync("); + Console.WriteLine(" operationId, \"keyframes/1000\");"); +} +``` + +## Next steps + +- [Sample 13: Delete result][sample13] - Learn how to delete analysis results +- [Sample 01: Analyze binary][sample01] - Learn more about basic document analysis + +## Learn more + +- [Content Understanding Documentation][cu-docs] +- [Video Analysis][video-docs] - Learn about video analysis capabilities + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample13]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample13_DeleteResult.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[video-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/video/overview + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample13_DeleteResult.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample13_DeleteResult.md new file mode 100644 index 000000000000..339e2143c47b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample13_DeleteResult.md @@ -0,0 +1,88 @@ +# Delete analysis results + +This sample demonstrates how to delete analysis results using the `DeleteResult` API. 
This is useful for removing temporary or sensitive analysis results immediately, rather than waiting for automatic deletion after 24 hours. + +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 01: Analyze a document from binary data][sample01] - Basic analysis concepts +- [Sample 12: Get result files][sample12] - Understanding operation IDs + +## About deleting results + +Analysis results are stored temporarily and can be deleted using the `DeleteResult` API: + +- **Immediate deletion**: Results are marked for deletion and permanently removed +- **Automatic deletion**: Results are automatically deleted after 24 hours if not manually deleted +- **Operation ID required**: You need the operation ID from the analysis operation to delete the result (see https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/operations) + +**Important**: Once deleted, results cannot be recovered. Make sure you have saved any data you need before deleting. + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. + +## Analyze and delete result + +Analyze a document and then delete the result: + +```C# Snippet:ContentUnderstandingAnalyzeAndDeleteResult +Uri documentUrl = new Uri(""); + +// Step 1: Start the analysis operation +var analyzeOperation = await client.AnalyzeAsync( + WaitUntil.Started, + "prebuilt-invoice", + inputs: new[] { new AnalyzeInput { Url = documentUrl } }); +// Get the operation ID from the operation (available after Started) +string operationId = analyzeOperation.Id; +Console.WriteLine($"Operation ID: {operationId}"); + +// Wait for completion +await analyzeOperation.WaitForCompletionAsync(); +AnalyzeResult result = analyzeOperation.Value; +Console.WriteLine("Analysis completed successfully!"); + +// Display some sample results +if (result.Contents?.FirstOrDefault() is DocumentContent docContent && docContent.Fields != null) +{ + Console.WriteLine($"Total fields extracted: {docContent.Fields.Count}"); + if (docContent.Fields.TryGetValue("CustomerName", out var customerNameField) && customerNameField is StringField sf) + { + Console.WriteLine($"Customer Name: {sf.ValueString ?? "(not found)"}"); + } +} + +// Step 2: Delete the analysis result +Console.WriteLine($"Deleting analysis result (Operation ID: {operationId})..."); +await client.DeleteResultAsync(operationId); +Console.WriteLine("Analysis result deleted successfully!"); +``` + +## When to delete results + +Delete results when you need to: +- **Remove sensitive data immediately**: Ensure sensitive information is not retained longer than necessary +- **Free up storage**: Remove results that are no longer needed +- **Comply with data retention policies**: Meet requirements for data deletion + +**Note**: Results are automatically deleted after 24 hours if not manually deleted. Manual deletion is only needed if you want to remove results immediately. 
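+If the goal is to guarantee that sensitive results never outlive the code that produced them, one option is to delete the result in a `finally` block so it is removed even when downstream processing fails. This is an illustrative pattern, not a sample snippet; the document URL and the `ProcessResult` helper are placeholders:
+
+```csharp
+// Illustrative pattern: always delete the analysis result, even if processing throws.
+Uri documentUrl = new Uri("<your-document-url>"); // placeholder
+var operation = await client.AnalyzeAsync(
+    WaitUntil.Started,
+    "prebuilt-invoice",
+    inputs: new[] { new AnalyzeInput { Url = documentUrl } });
+string operationId = operation.Id;
+
+try
+{
+    await operation.WaitForCompletionAsync();
+    ProcessResult(operation.Value); // placeholder for your own processing
+}
+finally
+{
+    await client.DeleteResultAsync(operationId);
+}
+```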
+ +## Next steps + +- [Sample 12: Get result files][sample12] - Learn how to retrieve result files using operation IDs +- [Sample 01: Analyze binary][sample01] - Learn more about basic document analysis + +## Learn more + +- [Content Understanding Documentation][cu-docs] + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample12]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample12_GetResultFile.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample14_CopyAnalyzer.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample14_CopyAnalyzer.md new file mode 100644 index 000000000000..49de6db1b496 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample14_CopyAnalyzer.md @@ -0,0 +1,115 @@ +# Copy an analyzer + +This sample demonstrates how to copy an analyzer from source to target within the same resource using the `CopyAnalyzer` API. This is useful for creating copies of analyzers for testing, staging, or production deployment. + +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 04: Create a custom analyzer][sample04] - Understanding analyzer creation +- [Sample 09: Delete analyzer][sample09] - Understanding analyzer lifecycle + +## About copying analyzers + +The `CopyAnalyzer` API allows you to copy an analyzer within the same Azure resource: + +- **Same-resource copy**: Copies an analyzer from one ID to another within the same resource +- **Exact copy**: The target analyzer is an exact copy of the source analyzer +- **Use cases**: Testing, staging, production deployment, versioning + +**Note**: For cross-resource copying (copying between different Azure resources or subscriptions), use the [GrantCopyAuth sample][sample15] instead. + +## Prerequisites + +To get started you'll need a **Microsoft Foundry resource** with model deployments configured. See [Sample 00][sample00] for setup instructions. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. + +## Copy an analyzer + +Create a source analyzer and copy it to a target. 
First, create the source analyzer (see [Sample 04][sample04] for details on creating analyzers), then copy it: + +```C# Snippet:ContentUnderstandingCopyAnalyzer +await client.CopyAnalyzerAsync( + WaitUntil.Completed, + targetAnalyzerId, + sourceAnalyzerId); +``` + +After copying, get the target analyzer, update it with a production tag, and verify the update: + +```C# Snippet:ContentUnderstandingUpdateAndVerifyAnalyzer +// Get the target analyzer first to get its BaseAnalyzerId +var targetResponse = await client.GetAnalyzerAsync(targetAnalyzerId); +ContentAnalyzer targetAnalyzer = targetResponse.Value; + +// Update the target analyzer with a production tag +var updatedAnalyzer = new ContentAnalyzer +{ + BaseAnalyzerId = targetAnalyzer.BaseAnalyzerId +}; +updatedAnalyzer.Tags["modelType"] = "model_in_production"; + +await client.UpdateAnalyzerAsync(targetAnalyzerId, updatedAnalyzer); + +// Get the target analyzer again to verify the update +var updatedResponse = await client.GetAnalyzerAsync(targetAnalyzerId); +ContentAnalyzer updatedTargetAnalyzer = updatedResponse.Value; +Console.WriteLine($"Updated target analyzer description: {updatedTargetAnalyzer.Description}"); +Console.WriteLine($"Updated target analyzer tag: {updatedTargetAnalyzer.Tags["modelType"]}"); +``` + +Finally, clean up by deleting both analyzers: + +```C# Snippet:ContentUnderstandingDeleteCopiedAnalyzers +try +{ + await client.DeleteAnalyzerAsync(sourceAnalyzerId); + Console.WriteLine($"Source analyzer '{sourceAnalyzerId}' deleted successfully."); +} +catch +{ + // Ignore cleanup errors +} + +try +{ + await client.DeleteAnalyzerAsync(targetAnalyzerId); + Console.WriteLine($"Target analyzer '{targetAnalyzerId}' deleted successfully."); +} +catch +{ + // Ignore cleanup errors +} +``` + +## When to copy analyzers + +Copy analyzers when you need to: +- **Create test versions**: Copy production analyzers for testing without affecting production +- **Version management**: Maintain multiple versions of the same analyzer +- **Staging deployment**: Copy analyzers from development to staging environments +- **Backup**: Create backup copies of important analyzers + +**Note**: For cross-resource copying (between different Azure resources or subscriptions), use the [GrantCopyAuth sample][sample15] which demonstrates the full workflow with authorization. 
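+For reference, the source analyzer assumed by the copy snippet above can be created with the same pattern shown in [Sample 04][sample04]. This is an illustrative sketch; the analyzer IDs are placeholders and the model mapping assumes a `gpt-4.1` deployment as in the other samples:
+
+```csharp
+// Illustrative setup for the copy example: create a simple source analyzer to copy from.
+string sourceAnalyzerId = $"copy_source_{DateTimeOffset.UtcNow.ToUnixTimeSeconds()}";
+string targetAnalyzerId = $"copy_target_{DateTimeOffset.UtcNow.ToUnixTimeSeconds()}";
+
+var sourceAnalyzer = new ContentAnalyzer
+{
+    BaseAnalyzerId = "prebuilt-document",
+    Description = "Source analyzer for the copy example"
+};
+sourceAnalyzer.Models.Add("completion", "gpt-4.1");
+
+await client.CreateAnalyzerAsync(WaitUntil.Completed, sourceAnalyzerId, sourceAnalyzer);
+```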
+ +## Next steps + +- [Sample 15: Grant copy authorization][sample15] - Learn how to copy analyzers across resources +- [Sample 04: Create analyzer][sample04] - Learn more about creating custom analyzers +- [Sample 09: Delete analyzer][sample09] - Learn about analyzer lifecycle management + +## Learn more + +- [Content Understanding Documentation][cu-docs] +- [Analyzer Management][analyzer-docs] - Learn about managing analyzers + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample04]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md +[sample09]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample09_DeleteAnalyzer.md +[sample15]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample15_GrantCopyAuth.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[analyzer-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/analyzer-reference + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample15_GrantCopyAuth.md b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample15_GrantCopyAuth.md new file mode 100644 index 000000000000..3538937a9d7c --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample15_GrantCopyAuth.md @@ -0,0 +1,213 @@ +# Grant copy authorization and copy analyzer + +This sample demonstrates how to grant copy authorization and copy an analyzer from a source resource to a target resource (cross-resource copying). This is useful for copying analyzers between different Azure resources or subscriptions. + +## Before you begin + +This sample builds on concepts introduced in previous samples: +- [Sample 04: Create a custom analyzer][sample04] - Understanding analyzer creation +- [Sample 14: Copy analyzer][sample14] - Understanding same-resource copying + +## About cross-resource copying + +The `GrantCopyAuthorization` and `CopyAnalyzer` APIs allow you to copy an analyzer between different Azure resources: + +- **Cross-resource copy**: Copies an analyzer from one Azure resource to another +- **Authorization required**: You must grant copy authorization before copying +- **Use cases**: Cross-subscription copying, resource migration, multi-region deployment + +**Note**: For same-resource copying (copying within the same Azure resource), use the [CopyAnalyzer sample][sample14] instead. 
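+At its core, cross-resource copying is two calls: grant authorization against the source resource, then run the copy from the target resource. The condensed sketch below uses placeholder clients, analyzer IDs, and resource/region values; the full example later in this sample shows how they are configured:
+
+```csharp
+// Illustrative outline: grant authorization on the source, then copy from the target.
+var copyAuth = await sourceClient.GrantCopyAuthorizationAsync(
+    sourceAnalyzerId,
+    targetResourceId,
+    targetRegion);
+
+await targetClient.CopyAnalyzerAsync(
+    WaitUntil.Completed,
+    targetAnalyzerId,
+    sourceAnalyzerId,
+    sourceResourceId,
+    sourceRegion);
+```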
+ +## Prerequisites + +To get started you'll need: +- **Source Microsoft Foundry resource** with model deployments configured +- **Target Microsoft Foundry resource** with model deployments configured +- Both resources require 'Cognitive Services User' role for cross-resource copying +- See [Sample 00][sample00] for setup instructions + +## Configuration + +This sample requires additional environment variables for the source resource (that contains the source analyzers) and the target resource (that the analyzers will be copied into): + +```json +{ + "AZURE_CONTENT_UNDERSTANDING_ENDPOINT": "https://source-resource.services.ai.azure.com/", + "AZURE_CONTENT_UNDERSTANDING_SOURCE_RESOURCE_ID": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{name}", + "AZURE_CONTENT_UNDERSTANDING_SOURCE_REGION": "eastus", + "AZURE_CONTENT_UNDERSTANDING_TARGET_ENDPOINT": "https://target-resource.services.ai.azure.com/", + "AZURE_CONTENT_UNDERSTANDING_TARGET_RESOURCE_ID": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{name}", + "AZURE_CONTENT_UNDERSTANDING_TARGET_REGION": "westus", + "AZURE_CONTENT_UNDERSTANDING_KEY": "optional-source-api-key", + "AZURE_CONTENT_UNDERSTANDING_TARGET_KEY": "optional-target-api-key" +} +``` + +**Note**: API keys (`AZURE_CONTENT_UNDERSTANDING_KEY` and `AZURE_CONTENT_UNDERSTANDING_TARGET_KEY`) are only required when `DefaultAzureCredential` is not used. If you're using Azure authentication (e.g., `az login` or managed identity), you can omit the keys and the sample will use `DefaultAzureCredential` for authentication. + +## Creating a `ContentUnderstandingClient` + +See [Sample 01][sample01] for authentication examples using `DefaultAzureCredential` or API key. + +## Grant copy authorization and copy analyzer + +Create a source analyzer, grant copy authorization, and copy it to a target resource: + +> **Note:** This snippet requires `using Azure.Identity;` for `DefaultAzureCredential`. + +```C# Snippet:ContentUnderstandingGrantCopyAuth +// Get source endpoint from configuration +// Note: configuration is already loaded in Main method +string sourceEndpoint = "https://source-resource.services.ai.azure.com/"; +string? sourceKey = "optional-source-api-key"; // Set to null to use DefaultAzureCredential + +// Create source client +ContentUnderstandingClient sourceClient = !string.IsNullOrEmpty(sourceKey) + ? new ContentUnderstandingClient(new Uri(sourceEndpoint), new AzureKeyCredential(sourceKey)) + : new ContentUnderstandingClient(new Uri(sourceEndpoint), new DefaultAzureCredential()); + +// Source analyzer ID (must already exist in the source resource) +string sourceAnalyzerId = "my_source_analyzer_id_in_the_source_resource"; +// Target analyzer ID (will be created during copy) +string targetAnalyzerId = "my_target_analyzer_id_in_the_target_resource"; + +// Get source and target resource information from configuration +string sourceResourceId = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{name}"; +string sourceRegion = "eastus"; // Replace with actual source region +string targetEndpoint = "https://target-resource.services.ai.azure.com/"; +string targetResourceId = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{name}"; +string targetRegion = "westus"; // Replace with actual target region +string? 
targetKey = "optional-target-api-key"; // Set to null to use DefaultAzureCredential + +// Create target client +ContentUnderstandingClient targetClient = !string.IsNullOrEmpty(targetKey) + ? new ContentUnderstandingClient(new Uri(targetEndpoint), new AzureKeyCredential(targetKey)) + : new ContentUnderstandingClient(new Uri(targetEndpoint), new DefaultAzureCredential()); + +// Step 1: Create the source analyzer +var sourceConfig = new ContentAnalyzerConfig +{ + EnableFormula = false, + EnableLayout = true, + EnableOcr = true, + EstimateFieldSourceAndConfidence = true, + ReturnDetails = true +}; + +var sourceFieldSchema = new ContentFieldSchema( + new Dictionary + { + ["company_name"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Extract, + Description = "Name of the company" + }, + ["total_amount"] = new ContentFieldDefinition + { + Type = ContentFieldType.Number, + Method = GenerationMethod.Extract, + Description = "Total amount on the document" + } + }) +{ + Name = "company_schema", + Description = "Schema for extracting company information" +}; + +var sourceAnalyzer = new ContentAnalyzer +{ + BaseAnalyzerId = "prebuilt-document", + Description = "Source analyzer for cross-resource copying", + Config = sourceConfig, + FieldSchema = sourceFieldSchema +}; +sourceAnalyzer.Models.Add("completion", "gpt-4.1"); + +var createOperation = await sourceClient.CreateAnalyzerAsync( + WaitUntil.Completed, + sourceAnalyzerId, + sourceAnalyzer); +var sourceResult = createOperation.Value; +Console.WriteLine($"Source analyzer '{sourceAnalyzerId}' created successfully!"); + +try +{ + // Step 2: Grant copy authorization + var copyAuth = await sourceClient.GrantCopyAuthorizationAsync( + sourceAnalyzerId, + targetResourceId, + targetRegion); + + Console.WriteLine("Copy authorization granted successfully!"); + Console.WriteLine($" Target Azure Resource ID: {copyAuth.Value.TargetAzureResourceId}"); + Console.WriteLine($" Target Region: {targetRegion}"); + Console.WriteLine($" Expires at: {copyAuth.Value.ExpiresAt}"); + + // Step 3: Copy analyzer to target resource + var copyOperation = await targetClient.CopyAnalyzerAsync( + WaitUntil.Completed, + targetAnalyzerId, + sourceAnalyzerId, + sourceResourceId, + sourceRegion); + + var targetResult = copyOperation.Value; + Console.WriteLine($"Target analyzer '{targetAnalyzerId}' copied successfully to target resource!"); + Console.WriteLine($"Target analyzer description: {targetResult.Description}"); + +} +finally +{ + // Clean up: delete both analyzers + try + { + await sourceClient.DeleteAnalyzerAsync(sourceAnalyzerId); + Console.WriteLine($"Source analyzer '{sourceAnalyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors + } + + try + { + await targetClient.DeleteAnalyzerAsync(targetAnalyzerId); + Console.WriteLine($"Target analyzer '{targetAnalyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors + } +} +``` + +## When to use cross-resource copying + +Use cross-resource copying when you need to: +- **Copy between subscriptions**: Move analyzers between different Azure subscriptions +- **Multi-region deployment**: Deploy the same analyzer to multiple regions +- **Resource migration**: Migrate analyzers from one resource to another +- **Environment promotion**: Promote analyzers from development to production across resources + +**Note**: Both source and target resources require 'Cognitive Services User' role for cross-resource copying. 
The copy authorization expires after a certain time, so copy operations should be performed soon after granting authorization. + +## Next steps + +- [Sample 14: Copy analyzer][sample14] - Learn about same-resource copying +- [Sample 04: Create analyzer][sample04] - Learn more about creating custom analyzers +- [Sample 09: Delete analyzer][sample09] - Learn about analyzer lifecycle management + +## Learn more + +- [Content Understanding Documentation][cu-docs] +- [Analyzer Management][analyzer-docs] - Learn about managing analyzers + +[sample00]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample00_ConfigureDefaults.md +[sample01]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample01_AnalyzeBinary.md +[sample04]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample04_CreateAnalyzer.md +[sample09]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample09_DeleteAnalyzer.md +[sample14]: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/samples/Sample14_CopyAnalyzer.md +[cu-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/ +[analyzer-docs]: https://learn.microsoft.com/azure/ai-services/content-understanding/concepts/analyzer-reference + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/AnalyzeResult.Customizations.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/AnalyzeResult.Customizations.cs new file mode 100644 index 000000000000..2b1c39738f2e --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/AnalyzeResult.Customizations.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Partial class for AnalyzeResult to customize LRO response handling. + /// + // SDK-FIX: Suppress FromLroResponse to fix service response format inconsistency (service sometimes wraps AnalyzeResult in "result" property, sometimes returns it directly) + [CodeGenSuppress("FromLroResponse", typeof(Response))] + public partial class AnalyzeResult + { + /// + /// Converts a response to an AnalyzeResult using the LRO result path. + /// + /// + /// SDK-FIX: Customized to handle service response format inconsistency. The service sometimes wraps AnalyzeResult + /// in a "result" property, and sometimes returns it directly. This workaround uses TryGetProperty to handle both formats. + /// + /// The response from the service. 
+ internal static AnalyzeResult FromLroResponse(Response response) + { + using JsonDocument document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + JsonElement rootElement = document.RootElement; + + // SDK-FIX: Check if the response has a "result" property, otherwise use the root element directly (handles both response formats) + if (rootElement.TryGetProperty("result", out JsonElement resultElement)) + { + return DeserializeAnalyzeResult(resultElement, ModelSerializationExtensions.WireOptions); + } + else + { + // The response might be the AnalyzeResult directly + return DeserializeAnalyzeResult(rootElement, ModelSerializationExtensions.WireOptions); + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/AnalyzeResultOperation.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/AnalyzeResultOperation.cs new file mode 100644 index 000000000000..f1dca2b9c8ef --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/AnalyzeResultOperation.cs @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable enable +using System; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Wrapper for that exposes the operation ID via the property. + /// + public class AnalyzeResultOperation : Operation + { + private readonly Operation _innerOperation; + private readonly string? _operationId; + + /// + /// Initializes a new instance of for mocking. + /// + protected AnalyzeResultOperation() + { + _innerOperation = null!; + } + + /// + /// Initializes a new instance of . + /// + /// The inner operation to wrap. + /// Optional operation ID. If not provided, will be extracted from the operation's raw response. + internal AnalyzeResultOperation(Operation innerOperation, string? operationId = null) + { + _innerOperation = innerOperation ?? throw new ArgumentNullException(nameof(innerOperation)); + _operationId = operationId ?? ExtractOperationId(innerOperation); + } + + /// + /// Gets the operation ID from the Operation-Location header of the operation response. + /// This operation ID can be used with , + /// , + /// , + /// and methods. + /// + public override string Id => _operationId ?? 
throw new InvalidOperationException("The operation ID was not present in the service response."); + + /// + public override AnalyzeResult Value => _innerOperation.Value; + + /// + public override bool HasValue => _innerOperation.HasValue; + + /// + public override bool HasCompleted => _innerOperation.HasCompleted; + + /// + public override Response GetRawResponse() => _innerOperation.GetRawResponse(); + + /// + public override Response UpdateStatus(CancellationToken cancellationToken = default) + => _innerOperation.UpdateStatus(cancellationToken); + + /// + public override ValueTask UpdateStatusAsync(CancellationToken cancellationToken = default) + => _innerOperation.UpdateStatusAsync(cancellationToken); + + /// + public override Response WaitForCompletion(CancellationToken cancellationToken = default) + => _innerOperation.WaitForCompletion(cancellationToken); + + /// + public override Response WaitForCompletion(TimeSpan pollingInterval, CancellationToken cancellationToken = default) + => _innerOperation.WaitForCompletion(pollingInterval, cancellationToken); + + /// + public override ValueTask> WaitForCompletionAsync(CancellationToken cancellationToken = default) + => _innerOperation.WaitForCompletionAsync(cancellationToken); + + /// + public override ValueTask> WaitForCompletionAsync(TimeSpan pollingInterval, CancellationToken cancellationToken = default) + => _innerOperation.WaitForCompletionAsync(pollingInterval, cancellationToken); + + private static string? ExtractOperationId(Operation operation) + { + var rawResponse = operation.GetRawResponse(); + if (rawResponse != null && rawResponse.Headers.TryGetValue("Operation-Location", out var operationLocation)) + { + // Extract operation ID from the URL: .../analyzerResults/{operationId} + // Use the same approach as the old extension method for consistency + if (Uri.TryCreate(operationLocation, UriKind.Absolute, out var uri)) + { + var segments = uri.Segments; + if (segments.Length > 0) + { + return segments[segments.Length - 1].TrimEnd('/'); + } + } + } + + return null; + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ArrayField.Extensions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ArrayField.Extensions.cs new file mode 100644 index 000000000000..efd1e91ae5bf --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ArrayField.Extensions.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Extension methods and convenience properties for . + /// + public partial class ArrayField + { + /// + /// Gets the number of items in the array. + /// + public int Count => ValueArray?.Count ?? 0; + + /// + /// Gets a field from the array by index. + /// + /// The zero-based index of the field to retrieve. + /// The field at the specified index, or null if the index is out of range. 
+ public ContentField this[int index] + { + get + { + if (ValueArray != null && index >= 0 && index < ValueArray.Count) + { + return ValueArray[index]; + } + return null; + } + } + } +} + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/AudioVisualContent.Customizations.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/AudioVisualContent.Customizations.cs new file mode 100644 index 000000000000..a9ea2dccc447 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/AudioVisualContent.Customizations.cs @@ -0,0 +1,272 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Partial class for AudioVisualContent to customize serialization/deserialization. + /// + // SDK-FIX: Suppress DeserializeAudioVisualContent to fix KeyFrameTimesMs property name casing inconsistency (service returns "KeyFrameTimesMs" instead of "keyFrameTimesMs") + [CodeGenSuppress("DeserializeAudioVisualContent", typeof(JsonElement), typeof(ModelReaderWriterOptions))] + public partial class AudioVisualContent + { + /// + /// SDK-FIX: Override serialization to use "KeyFrameTimesMs" (capital K) to match service response format instead of "keyFrameTimesMs" + /// + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AudioVisualContent)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("startTimeMs"u8); + writer.WriteNumberValue(StartTimeMs); + writer.WritePropertyName("endTimeMs"u8); + writer.WriteNumberValue(EndTimeMs); + if (Optional.IsDefined(Width)) + { + writer.WritePropertyName("width"u8); + writer.WriteNumberValue(Width.Value); + } + if (Optional.IsDefined(Height)) + { + writer.WritePropertyName("height"u8); + writer.WriteNumberValue(Height.Value); + } + if (Optional.IsCollectionDefined(CameraShotTimesMs)) + { + writer.WritePropertyName("cameraShotTimesMs"u8); + writer.WriteStartArray(); + foreach (long item in CameraShotTimesMs) + { + writer.WriteNumberValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(KeyFrameTimesMs)) + { + // SDK-FIX: Serialize as "KeyFrameTimesMs" (capital K) to match service response format instead of "keyFrameTimesMs" + writer.WritePropertyName("KeyFrameTimesMs"u8); + writer.WriteStartArray(); + foreach (long item in KeyFrameTimesMs) + { + writer.WriteNumberValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(TranscriptPhrases)) + { + writer.WritePropertyName("transcriptPhrases"u8); + writer.WriteStartArray(); + foreach (TranscriptPhrase item in TranscriptPhrases) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Segments)) + { + writer.WritePropertyName("segments"u8); + writer.WriteStartArray(); + foreach (AudioVisualContentSegment item in Segments) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + } + + // SDK-FIX: Reimplement deserialization to handle both 
"keyFrameTimesMs" (TypeSpec definition) and "KeyFrameTimesMs" (service response format) + internal static AudioVisualContent DeserializeAudioVisualContent(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + MediaContentKind kind = default; + string mimeType = default; + string analyzerId = default; + string category = default; + string path = default; + string markdown = default; + IDictionary fields = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + long startTimeMs = default; + long endTimeMs = default; + int? width = default; + int? height = default; + IList cameraShotTimesMs = default; + IList keyFrameTimesMs = default; + IList transcriptPhrases = default; + IList segments = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("kind"u8)) + { + kind = new MediaContentKind(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("mimeType"u8)) + { + mimeType = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("analyzerId"u8)) + { + analyzerId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("category"u8)) + { + category = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("path"u8)) + { + path = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("markdown"u8)) + { + markdown = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("fields"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + dictionary.Add(prop0.Name, ContentField.DeserializeContentField(prop0.Value, options)); + } + fields = dictionary; + continue; + } + if (prop.NameEquals("startTimeMs"u8)) + { + startTimeMs = prop.Value.GetInt64(); + continue; + } + if (prop.NameEquals("endTimeMs"u8)) + { + endTimeMs = prop.Value.GetInt64(); + continue; + } + if (prop.NameEquals("width"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + width = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("height"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + height = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("cameraShotTimesMs"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(item.GetInt64()); + } + cameraShotTimesMs = array; + continue; + } + // SDK-FIX: Handle both "keyFrameTimesMs" (TypeSpec definition) and "KeyFrameTimesMs" (service response format - capital K) + if (prop.NameEquals("keyFrameTimesMs"u8) || prop.NameEquals("KeyFrameTimesMs"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + // Only set if not already set (to avoid overwriting if both casings are present) + if (keyFrameTimesMs == null) + { + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(item.GetInt64()); + } + keyFrameTimesMs = array; + } + continue; + } + if (prop.NameEquals("transcriptPhrases"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(TranscriptPhrase.DeserializeTranscriptPhrase(item, options)); + } + transcriptPhrases = array; + continue; + } + if (prop.NameEquals("segments"u8)) + { + if (prop.Value.ValueKind == 
JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(AudioVisualContentSegment.DeserializeAudioVisualContentSegment(item, options)); + } + segments = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new AudioVisualContent( + kind, + mimeType, + analyzerId, + category, + path, + markdown, + fields ?? new ChangeTrackingDictionary(), + additionalBinaryDataProperties, + startTimeMs, + endTimeMs, + width, + height, + cameraShotTimesMs ?? new ChangeTrackingList(), + keyFrameTimesMs ?? new ChangeTrackingList(), + transcriptPhrases ?? new ChangeTrackingList(), + segments ?? new ChangeTrackingList()); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Azure.AI.ContentUnderstanding.csproj b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Azure.AI.ContentUnderstanding.csproj new file mode 100644 index 000000000000..baee2398269a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Azure.AI.ContentUnderstanding.csproj @@ -0,0 +1,42 @@ + + + Azure.AI.ContentUnderstanding client library for Azure Cognitive Services Content Understanding, a multimodal AI service that extracts semantic content from documents, audio, and video files + SDK Code Generation Azure.AI.ContentUnderstanding + 1.0.0-beta.1 + Azure.AI.ContentUnderstanding + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentAnalyzer.Customizations.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentAnalyzer.Customizations.cs new file mode 100644 index 000000000000..7be2877bb3d1 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentAnalyzer.Customizations.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; +using Azure; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Partial class for ContentAnalyzer to customize LRO response handling. + /// + // SDK-FIX: Suppress FromLroResponse to fix service response format inconsistency (service sometimes wraps ContentAnalyzer in "result" property, sometimes returns it directly) + [CodeGenSuppress("FromLroResponse", typeof(Response))] + public partial class ContentAnalyzer + { + /// + /// Converts a response to a ContentAnalyzer using the LRO result path. + /// + /// + /// SDK-FIX: Customized to handle service response format inconsistency. The service sometimes wraps ContentAnalyzer + /// in a "result" property, and sometimes returns it directly. This workaround uses TryGetProperty to handle both formats. + /// + /// The response from the service. 
+ internal static ContentAnalyzer FromLroResponse(Response response) + { + using JsonDocument document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + JsonElement rootElement = document.RootElement; + + // SDK-FIX: Check if the response has a "result" property, otherwise use the root element directly (handles both response formats) + if (rootElement.TryGetProperty("result", out JsonElement resultElement)) + { + return DeserializeContentAnalyzer(resultElement, ModelSerializationExtensions.WireOptions); + } + else + { + // The response might be the ContentAnalyzer directly + return DeserializeContentAnalyzer(rootElement, ModelSerializationExtensions.WireOptions); + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentField.Extensions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentField.Extensions.cs new file mode 100644 index 000000000000..40f462297c4e --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentField.Extensions.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Extension methods and convenience properties for . + /// + public partial class ContentField + { + /// + /// Gets the value of the field, regardless of its type. + /// Returns the appropriate typed value for each field type: + /// - : returns + /// - : returns + /// - : returns + /// - : returns + /// - : returns + /// - : returns + /// - : returns + /// - : returns + /// - : returns + /// + /// + /// + /// // Simple field access + /// var customerName = documentContent.Fields["CustomerName"].Value?.ToString(); + /// + /// // Nested object access + /// var totalAmountObj = (ObjectField)documentContent.Fields["TotalAmount"]; + /// var amount = totalAmountObj.ValueObject["Amount"].Value; + /// + /// + public object Value => this switch + { + StringField sf => sf.ValueString, + NumberField nf => nf.ValueNumber, + IntegerField inf => inf.ValueInteger, + DateField df => df.ValueDate, + TimeField tf => tf.ValueTime, + BooleanField bf => bf.ValueBoolean, + ObjectField of => of.ValueObject, + ArrayField af => af.ValueArray, + JsonField jf => jf.ValueJson, + _ => null + }; + } +} + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentUnderstandingClient.Customizations.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentUnderstandingClient.Customizations.cs new file mode 100644 index 000000000000..2aaca8fc2b7d --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentUnderstandingClient.Customizations.cs @@ -0,0 +1,271 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable enable +using System; +using System.Collections.Generic; +using System.ClientModel.Primitives; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Partial class for ContentUnderstandingClient to customize generated methods. 
+ /// + // Suppress convenience methods with stringEncoding parameter - we'll implement custom versions without it + [CodeGenSuppress("AnalyzeAsync", typeof(WaitUntil), typeof(string), typeof(IEnumerable), typeof(IDictionary), typeof(string), typeof(ProcessingLocation?), typeof(CancellationToken))] + [CodeGenSuppress("Analyze", typeof(WaitUntil), typeof(string), typeof(IEnumerable), typeof(IDictionary), typeof(string), typeof(ProcessingLocation?), typeof(CancellationToken))] + [CodeGenSuppress("AnalyzeBinaryAsync", typeof(WaitUntil), typeof(string), typeof(string), typeof(BinaryData), typeof(string), typeof(ProcessingLocation?), typeof(string), typeof(CancellationToken))] + [CodeGenSuppress("AnalyzeBinary", typeof(WaitUntil), typeof(string), typeof(string), typeof(BinaryData), typeof(string), typeof(ProcessingLocation?), typeof(string), typeof(CancellationToken))] + // SDK-FIX: Suppress CreateCopyAnalyzerRequest to fix copy endpoint path (emitter generates ":copyAnalyzer" instead of ":copy") and status code handling (service returns both 201 and 202) + [CodeGenSuppress("CreateCopyAnalyzerRequest", typeof(string), typeof(RequestContent), typeof(bool?), typeof(RequestContext))] + public partial class ContentUnderstandingClient + { + // CUSTOM CODE NOTE: we're suppressing the generation of the Analyze and AnalyzeBinary + // convenience methods and adding methods manually below for the following reasons: + // - Hiding the stringEncoding parameter. We're making its value default to 'utf16' (appropriate for .NET). + // - Exposing operation ID via the Id property on the returned Operation via AnalyzeResultOperation wrapper. + + private const string DefaultStringEncoding = "utf16"; + + /// Extract content and fields from input. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// Inputs to analyze. Currently, only pro mode supports multiple inputs. + /// + /// Override default mapping of model names to deployments. + /// Ex. { "gpt-4.1": "myGpt41Deployment", "text-embedding-3-large": "myTextEmbedding3LargeDeployment" }. + /// + /// The location where the data may be processed. Defaults to global. + /// The cancellation token that can be used to cancel the operation. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// The with operation ID accessible via the Id property. + public virtual async Task AnalyzeAsync(WaitUntil waitUntil, string analyzerId, IEnumerable? inputs = default, IDictionary? modelDeployments = default, ProcessingLocation? processingLocation = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + + AnalyzeRequest1 spreadModel = new AnalyzeRequest1(inputs?.ToList() as IList ?? new ChangeTrackingList(), modelDeployments ?? new ChangeTrackingDictionary(), new ChangeTrackingDictionary()); + Operation result = await AnalyzeAsync(waitUntil, analyzerId, spreadModel, DefaultStringEncoding, processingLocation?.ToString(), cancellationToken.ToRequestContext()).ConfigureAwait(false); + // Extract operation ID from the original operation before conversion, as the converted operation might not preserve the Operation-Location header + string? 
operationId = ExtractOperationIdFromBinaryDataOperation(result); + Operation converted = ProtocolOperationHelpers.Convert(result, response => AnalyzeResult.FromLroResponse(response), ClientDiagnostics, "ContentUnderstandingClient.AnalyzeAsync"); + return new AnalyzeResultOperation(converted, operationId); + } + + /// Extract content and fields from input. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// Inputs to analyze. Currently, only pro mode supports multiple inputs. + /// + /// Override default mapping of model names to deployments. + /// Ex. { "gpt-4.1": "myGpt41Deployment", "text-embedding-3-large": "myTextEmbedding3LargeDeployment" }. + /// + /// The location where the data may be processed. Defaults to global. + /// The cancellation token that can be used to cancel the operation. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// The with operation ID accessible via the Id property. + public virtual AnalyzeResultOperation Analyze(WaitUntil waitUntil, string analyzerId, IEnumerable? inputs = default, IDictionary? modelDeployments = default, ProcessingLocation? processingLocation = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + + AnalyzeRequest1 spreadModel = new AnalyzeRequest1(inputs?.ToList() as IList ?? new ChangeTrackingList(), modelDeployments ?? new ChangeTrackingDictionary(), new ChangeTrackingDictionary()); + Operation result = Analyze(waitUntil, analyzerId, spreadModel, DefaultStringEncoding, processingLocation?.ToString(), cancellationToken.ToRequestContext()); + // Extract operation ID from the original operation before conversion, as the converted operation might not preserve the Operation-Location header + string? operationId = ExtractOperationIdFromBinaryDataOperation(result); + Operation converted = ProtocolOperationHelpers.Convert(result, response => AnalyzeResult.FromLroResponse(response), ClientDiagnostics, "ContentUnderstandingClient.Analyze"); + return new AnalyzeResultOperation(converted, operationId); + } + + /// Extract content and fields from binary input. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// Request content type. + /// The binary content of the document to analyze. + /// This parameter is ignored. The SDK always uses "utf16" encoding for .NET. + /// The location where the data may be processed. Defaults to global. + /// Range of the input to analyze (ex. `1-3,5,9-`). Document content uses 1-based page numbers, while audio visual content uses integer milliseconds. + /// The cancellation token that can be used to cancel the operation. + /// , or is null. + /// or is an empty string, and was expected to be non-empty. + /// The with operation ID accessible via the Id property. + /// + /// To avoid ambiguity with the protocol method, explicitly specify the return type as AnalyzeResultOperation when calling this method. 
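+ /// A hypothetical usage sketch (the analyzer ID and file name are illustrative, not names shipped with the service):
+ /// <code>
+ /// AnalyzeResultOperation operation = await client.AnalyzeBinaryAsync(
+ ///     WaitUntil.Completed,
+ ///     "myDocumentAnalyzer",
+ ///     "application/pdf",
+ ///     BinaryData.FromBytes(File.ReadAllBytes("invoice.pdf")));
+ /// AnalyzeResult result = operation.Value;
+ /// Console.WriteLine(operation.Id);
+ /// </code>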
+ /// + public virtual async Task AnalyzeBinaryAsync(WaitUntil waitUntil, string analyzerId, string contentType, BinaryData binaryInput, string? stringEncoding = default, ProcessingLocation? processingLocation = default, string? inputRange = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNullOrEmpty(contentType, nameof(contentType)); + Argument.AssertNotNull(binaryInput, nameof(binaryInput)); + + // Ignore stringEncoding parameter - always use utf16 for .NET + Operation result = await AnalyzeBinaryAsync(waitUntil, analyzerId, contentType, RequestContent.Create(binaryInput), DefaultStringEncoding, processingLocation?.ToString(), inputRange, cancellationToken.ToRequestContext()).ConfigureAwait(false); + // Extract operation ID from the original operation before conversion, as the converted operation might not preserve the Operation-Location header + string? operationId = ExtractOperationIdFromBinaryDataOperation(result); + Operation converted = ProtocolOperationHelpers.Convert(result, response => AnalyzeResult.FromLroResponse(response), ClientDiagnostics, "ContentUnderstandingClient.AnalyzeBinaryAsync"); + return new AnalyzeResultOperation(converted, operationId); + } + + /// Extract content and fields from binary input. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// Request content type. + /// The binary content of the document to analyze. + /// This parameter is ignored. The SDK always uses "utf16" encoding for .NET. + /// The location where the data may be processed. Defaults to global. + /// Range of the input to analyze (ex. `1-3,5,9-`). Document content uses 1-based page numbers, while audio visual content uses integer milliseconds. + /// The cancellation token that can be used to cancel the operation. + /// , or is null. + /// or is an empty string, and was expected to be non-empty. + /// The with operation ID accessible via the Id property. + /// + /// To avoid ambiguity with the protocol method, explicitly specify the return type as AnalyzeResultOperation when calling this method. + /// + public virtual AnalyzeResultOperation AnalyzeBinary(WaitUntil waitUntil, string analyzerId, string contentType, BinaryData binaryInput, string? stringEncoding = default, ProcessingLocation? processingLocation = default, string? inputRange = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNullOrEmpty(contentType, nameof(contentType)); + Argument.AssertNotNull(binaryInput, nameof(binaryInput)); + + // Ignore stringEncoding parameter - always use utf16 for .NET + Operation result = AnalyzeBinary(waitUntil, analyzerId, contentType, RequestContent.Create(binaryInput), DefaultStringEncoding, processingLocation?.ToString(), inputRange, cancellationToken.ToRequestContext()); + // Extract operation ID from the original operation before conversion, as the converted operation might not preserve the Operation-Location header + string? 
operationId = ExtractOperationIdFromBinaryDataOperation(result); + Operation converted = ProtocolOperationHelpers.Convert(result, response => AnalyzeResult.FromLroResponse(response), ClientDiagnostics, "ContentUnderstandingClient.AnalyzeBinary"); + return new AnalyzeResultOperation(converted, operationId); + } + + /// + /// Extracts the operation ID from an Operation<BinaryData> by reading the Operation-Location header. + /// + private static string? ExtractOperationIdFromBinaryDataOperation(Operation operation) + { + var rawResponse = operation.GetRawResponse(); + if (rawResponse != null && rawResponse.Headers.TryGetValue("Operation-Location", out var operationLocation)) + { + // Extract operation ID from the URL: .../analyzerResults/{operationId} + // Use the same approach as the old extension method for consistency + if (Uri.TryCreate(operationLocation, UriKind.Absolute, out var uri)) + { + var segments = uri.Segments; + if (segments.Length > 0) + { + return segments[segments.Length - 1].TrimEnd('/'); + } + } + } + + return null; + } + + // SDK-FIX: Response classifier to accept both 201 and 202 status codes (service inconsistently returns both) + private static ResponseClassifier? _pipelineMessageClassifier201202; + private static ResponseClassifier PipelineMessageClassifier201202 => + _pipelineMessageClassifier201202 ??= new StatusCodeClassifier(stackalloc ushort[] { 201, 202 }); + + /// + /// Creates the HTTP message for the copy analyzer request. + /// + /// + /// SDK-FIX: Customized to fix copy endpoint path (emitter generates ":copyAnalyzer" instead of ":copy") + /// and status code handling (service returns both 201 and 202 instead of just 202). + /// + internal HttpMessage CreateCopyAnalyzerRequest(string analyzerId, RequestContent content, bool? allowReplace, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzers/", false); + uri.AppendPath(analyzerId, true); + uri.AppendPath(":copy", false); // SDK-FIX: Changed from ":copyAnalyzer" to ":copy" (emitter generates wrong endpoint path) + uri.AppendQuery("api-version", _apiVersion, true); + if (allowReplace != null) + { + uri.AppendQuery("allowReplace", TypeFormatters.ConvertToString(allowReplace), true); + } + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier201202); // SDK-FIX: Changed from PipelineMessageClassifier202 to accept both 201 and 202 (service inconsistently returns both status codes) + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Post; + request.Headers.SetValue("Content-Type", "application/json"); + request.Headers.SetValue("Accept", "application/json"); + request.Content = content; + return message; + } + + // TODO: Uncomment these methods when ready to regenerate the SDK. + // These methods are currently commented out because the generated code has been manually + // edited to make UpdateDefaults methods internal. Once the SDK is regenerated with the + // proper configuration to generate them as internal, uncomment these to ensure they + // remain internal even after regeneration. + // + // According to autorest.csharp customization pattern (https://github.com/Azure/autorest.csharp#replace-any-generated-member), + // defining a partial class with the same method signature but different accessibility + // replaces the generated public method with this internal version. 
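+ // Illustrative note on the CreateCopyAnalyzerRequest fix above: the corrected request takes roughly the form
+ //   POST {endpoint}/contentunderstanding/analyzers/{analyzerId}:copy?api-version={apiVersion}&allowReplace={allowReplace}
+ // (the allowReplace query parameter is only appended when a value is supplied), instead of the ":copyAnalyzer"
+ // path the emitter would otherwise generate.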
+ + /* + // TODO: Uncomment these methods when ready to regenerate the SDK. + // These methods are currently commented out because the generated code has been manually + // edited to make UpdateDefaults methods internal. Once the SDK is regenerated with the + // proper configuration to generate them as internal, uncomment these to ensure they + // remain internal even after regeneration. + // + // According to autorest.csharp customization pattern (https://github.com/Azure/autorest.csharp#replace-any-generated-member), + // defining a partial class with the same method signature but different accessibility + // replaces the generated public method with this internal version. + + /* + /// + /// [Protocol Method] Update default model deployment settings. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// This method is internal. Use the convenience extension methods or + /// instead. + /// + /// The content to send as the body of the request. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual Response UpdateDefaults(RequestContent content, RequestContext? context = null) + { + // The generated implementation will be inserted here by autorest.csharp + // This method signature replaces the public version from the generated code + throw new NotImplementedException(); + } + + /// + /// [Protocol Method] Update default model deployment settings asynchronously. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// This method is internal. Use the convenience extension methods or + /// instead. + /// + /// The content to send as the body of the request. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task UpdateDefaultsAsync(RequestContent content, RequestContext? context = null) + { + // The generated implementation will be inserted here by autorest.csharp + // This method signature replaces the public version from the generated code + throw new NotImplementedException(); + } + */ + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentUnderstandingClient.Extensions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentUnderstandingClient.Extensions.cs new file mode 100644 index 000000000000..05f153e9b8a7 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ContentUnderstandingClient.Extensions.cs @@ -0,0 +1,110 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable enable +using System; +using System.Collections.Generic; +using System.ClientModel.Primitives; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Extension methods for to provide convenience APIs. + /// + public static partial class ContentUnderstandingClientExtensions + { + /// Update analyzer properties. + /// The client instance. + /// The unique identifier of the analyzer. 
+ /// The resource instance with properties to update. + /// The cancellation token that can be used to cancel the operation. + /// , , or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public static Response UpdateAnalyzer(this ContentUnderstandingClient client, string analyzerId, ContentAnalyzer resource, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(client, nameof(client)); + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(resource, nameof(resource)); + + return client.UpdateAnalyzer(analyzerId, RequestContent.Create(resource), cancellationToken.ToRequestContext()); + } + + /// Update analyzer properties asynchronously. + /// The client instance. + /// The unique identifier of the analyzer. + /// The resource instance with properties to update. + /// The cancellation token that can be used to cancel the operation. + /// , , or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public static async Task UpdateAnalyzerAsync(this ContentUnderstandingClient client, string analyzerId, ContentAnalyzer resource, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(client, nameof(client)); + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(resource, nameof(resource)); + + return await client.UpdateAnalyzerAsync(analyzerId, RequestContent.Create(resource), cancellationToken.ToRequestContext()).ConfigureAwait(false); + } + + /// Update default model deployment settings. + /// + /// This is the recommended public API for updating default model deployment settings. + /// The generated protocol methods (UpdateDefaults/UpdateDefaultsAsync with RequestContent) are internal + /// and should not be used directly. This convenience method provides a simpler API that accepts + /// a dictionary mapping model names to deployment names. + /// + /// The client instance. + /// Mapping of model names to deployment names. For example: { "gpt-4.1": "myGpt41Deployment", "text-embedding-3-large": "myTextEmbedding3LargeDeployment" }. + /// The cancellation token that can be used to cancel the operation. + /// or is null. + /// Service returned a non-success status code. + /// The response returned from the service. + public static Response UpdateDefaults(this ContentUnderstandingClient client, IDictionary modelDeployments, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(client, nameof(client)); + Argument.AssertNotNull(modelDeployments, nameof(modelDeployments)); + + var defaults = ContentUnderstandingModelFactory.ContentUnderstandingDefaults(modelDeployments); + var writerOptions = new ModelReaderWriterOptions("W"); + var requestContent = RequestContent.Create( + ModelReaderWriter.Write(defaults, writerOptions, AzureAIContentUnderstandingContext.Default)); + + Response response = client.UpdateDefaults(requestContent, cancellationToken.ToRequestContext()); + return Response.FromValue((ContentUnderstandingDefaults)response, response); + } + + /// Update default model deployment settings asynchronously. + /// + /// This is the recommended public API for updating default model deployment settings. 
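+ /// For example (a hypothetical sketch; the deployment names are illustrative):
+ /// <code>
+ /// var deployments = new Dictionary&lt;string, string&gt;
+ /// {
+ ///     ["gpt-4.1"] = "myGpt41Deployment",
+ ///     ["text-embedding-3-large"] = "myTextEmbedding3LargeDeployment"
+ /// };
+ /// var response = await client.UpdateDefaultsAsync(deployments);
+ /// </code>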
+ /// The generated protocol methods (UpdateDefaults/UpdateDefaultsAsync with RequestContent) are internal + /// and should not be used directly. This convenience method provides a simpler API that accepts + /// a dictionary mapping model names to deployment names. + /// + /// The client instance. + /// Mapping of model names to deployment names. For example: { "gpt-4.1": "myGpt41Deployment", "text-embedding-3-large": "myTextEmbedding3LargeDeployment" }. + /// The cancellation token that can be used to cancel the operation. + /// or is null. + /// Service returned a non-success status code. + /// The response returned from the service. + public static async Task> UpdateDefaultsAsync(this ContentUnderstandingClient client, IDictionary modelDeployments, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(client, nameof(client)); + Argument.AssertNotNull(modelDeployments, nameof(modelDeployments)); + + var defaults = ContentUnderstandingModelFactory.ContentUnderstandingDefaults(modelDeployments); + var writerOptions = new ModelReaderWriterOptions("W"); + var requestContent = RequestContent.Create( + ModelReaderWriter.Write(defaults, writerOptions, AzureAIContentUnderstandingContext.Default)); + + Response response = await client.UpdateDefaultsAsync(requestContent, cancellationToken.ToRequestContext()).ConfigureAwait(false); + return Response.FromValue((ContentUnderstandingDefaults)response, response); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/DocumentContent.Extensions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/DocumentContent.Extensions.cs new file mode 100644 index 000000000000..4b41f4084896 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/DocumentContent.Extensions.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Extension methods for . + /// + public partial class DocumentContent + { + /// + /// Gets a field from the document by name. + /// + /// The name of the field to retrieve. + /// The field if found, or null if not found. + public ContentField this[string fieldName] + { + get + { + if (Fields != null && Fields.TryGetValue(fieldName, out var field)) + { + return field; + } + return null; + } + } + } +} + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersAsyncCollectionResult.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersAsyncCollectionResult.cs new file mode 100644 index 000000000000..89d9f35a81d1 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersAsyncCollectionResult.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Threading.Tasks; +using Azure; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class ContentUnderstandingClientGetAnalyzersAsyncCollectionResult : AsyncPageable + { + private readonly ContentUnderstandingClient _client; + private readonly RequestContext _context; + + /// Initializes a new instance of ContentUnderstandingClientGetAnalyzersAsyncCollectionResult, which is used to iterate over the pages of a collection. + /// The ContentUnderstandingClient client used to send requests. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + public ContentUnderstandingClientGetAnalyzersAsyncCollectionResult(ContentUnderstandingClient client, RequestContext context) : base(context?.CancellationToken ?? default) + { + _client = client; + _context = context; + } + + /// Gets the pages of ContentUnderstandingClientGetAnalyzersAsyncCollectionResult as an enumerable collection. + /// A continuation token indicating where to resume paging. + /// The number of items per page. + /// The pages of ContentUnderstandingClientGetAnalyzersAsyncCollectionResult as an enumerable collection. + public override async IAsyncEnumerable> AsPages(string continuationToken, int? pageSizeHint) + { + Uri nextPage = continuationToken != null ? new Uri(continuationToken) : null; + while (true) + { + Response response = await GetNextResponseAsync(pageSizeHint, nextPage).ConfigureAwait(false); + if (response is null) + { + yield break; + } + PagedContentAnalyzer result = (PagedContentAnalyzer)response; + List items = new List(); + foreach (var item in result.Value) + { + items.Add(ModelReaderWriter.Write(item, ModelSerializationExtensions.WireOptions, AzureAIContentUnderstandingContext.Default)); + } + yield return Page.FromValues(items, nextPage?.AbsoluteUri, response); + nextPage = result.NextLink; + if (nextPage == null) + { + yield break; + } + } + } + + /// Get next page. + /// The number of items per page. + /// The next link to use for the next page of results. + private async ValueTask GetNextResponseAsync(int? pageSizeHint, Uri nextLink) + { + HttpMessage message = nextLink != null ? _client.CreateNextGetAnalyzersRequest(nextLink, _context) : _client.CreateGetAnalyzersRequest(_context); + using DiagnosticScope scope = _client.ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetAnalyzers"); + scope.Start(); + try + { + return await _client.Pipeline.ProcessMessageAsync(message, _context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersAsyncCollectionResultOfT.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersAsyncCollectionResultOfT.cs new file mode 100644 index 000000000000..1a869c49286b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersAsyncCollectionResultOfT.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Azure; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class ContentUnderstandingClientGetAnalyzersAsyncCollectionResultOfT : AsyncPageable + { + private readonly ContentUnderstandingClient _client; + private readonly RequestContext _context; + + /// Initializes a new instance of ContentUnderstandingClientGetAnalyzersAsyncCollectionResultOfT, which is used to iterate over the pages of a collection. + /// The ContentUnderstandingClient client used to send requests. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + public ContentUnderstandingClientGetAnalyzersAsyncCollectionResultOfT(ContentUnderstandingClient client, RequestContext context) : base(context?.CancellationToken ?? default) + { + _client = client; + _context = context; + } + + /// Gets the pages of ContentUnderstandingClientGetAnalyzersAsyncCollectionResultOfT as an enumerable collection. + /// A continuation token indicating where to resume paging. + /// The number of items per page. + /// The pages of ContentUnderstandingClientGetAnalyzersAsyncCollectionResultOfT as an enumerable collection. + public override async IAsyncEnumerable> AsPages(string continuationToken, int? pageSizeHint) + { + Uri nextPage = continuationToken != null ? new Uri(continuationToken) : null; + while (true) + { + Response response = await GetNextResponseAsync(pageSizeHint, nextPage).ConfigureAwait(false); + if (response is null) + { + yield break; + } + PagedContentAnalyzer result = (PagedContentAnalyzer)response; + yield return Page.FromValues((IReadOnlyList)result.Value, nextPage?.AbsoluteUri, response); + nextPage = result.NextLink; + if (nextPage == null) + { + yield break; + } + } + } + + /// Get next page. + /// The number of items per page. + /// The next link to use for the next page of results. + private async ValueTask GetNextResponseAsync(int? pageSizeHint, Uri nextLink) + { + HttpMessage message = nextLink != null ? _client.CreateNextGetAnalyzersRequest(nextLink, _context) : _client.CreateGetAnalyzersRequest(_context); + using DiagnosticScope scope = _client.ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetAnalyzers"); + scope.Start(); + try + { + return await _client.Pipeline.ProcessMessageAsync(message, _context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersCollectionResult.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersCollectionResult.cs new file mode 100644 index 000000000000..67f8ea827530 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersCollectionResult.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using Azure; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class ContentUnderstandingClientGetAnalyzersCollectionResult : Pageable + { + private readonly ContentUnderstandingClient _client; + private readonly RequestContext _context; + + /// Initializes a new instance of ContentUnderstandingClientGetAnalyzersCollectionResult, which is used to iterate over the pages of a collection. + /// The ContentUnderstandingClient client used to send requests. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + public ContentUnderstandingClientGetAnalyzersCollectionResult(ContentUnderstandingClient client, RequestContext context) : base(context?.CancellationToken ?? default) + { + _client = client; + _context = context; + } + + /// Gets the pages of ContentUnderstandingClientGetAnalyzersCollectionResult as an enumerable collection. + /// A continuation token indicating where to resume paging. + /// The number of items per page. + /// The pages of ContentUnderstandingClientGetAnalyzersCollectionResult as an enumerable collection. + public override IEnumerable> AsPages(string continuationToken, int? pageSizeHint) + { + Uri nextPage = continuationToken != null ? new Uri(continuationToken) : null; + while (true) + { + Response response = GetNextResponse(pageSizeHint, nextPage); + if (response is null) + { + yield break; + } + PagedContentAnalyzer result = (PagedContentAnalyzer)response; + List items = new List(); + foreach (var item in result.Value) + { + items.Add(ModelReaderWriter.Write(item, ModelSerializationExtensions.WireOptions, AzureAIContentUnderstandingContext.Default)); + } + yield return Page.FromValues(items, nextPage?.AbsoluteUri, response); + nextPage = result.NextLink; + if (nextPage == null) + { + yield break; + } + } + } + + /// Get next page. + /// The number of items per page. + /// The next link to use for the next page of results. + private Response GetNextResponse(int? pageSizeHint, Uri nextLink) + { + HttpMessage message = nextLink != null ? _client.CreateNextGetAnalyzersRequest(nextLink, _context) : _client.CreateGetAnalyzersRequest(_context); + using DiagnosticScope scope = _client.ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetAnalyzers"); + scope.Start(); + try + { + return _client.Pipeline.ProcessMessage(message, _context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersCollectionResultOfT.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersCollectionResultOfT.cs new file mode 100644 index 000000000000..00890517af86 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/CollectionResults/ContentUnderstandingClientGetAnalyzersCollectionResultOfT.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class ContentUnderstandingClientGetAnalyzersCollectionResultOfT : Pageable + { + private readonly ContentUnderstandingClient _client; + private readonly RequestContext _context; + + /// Initializes a new instance of ContentUnderstandingClientGetAnalyzersCollectionResultOfT, which is used to iterate over the pages of a collection. + /// The ContentUnderstandingClient client used to send requests. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + public ContentUnderstandingClientGetAnalyzersCollectionResultOfT(ContentUnderstandingClient client, RequestContext context) : base(context?.CancellationToken ?? default) + { + _client = client; + _context = context; + } + + /// Gets the pages of ContentUnderstandingClientGetAnalyzersCollectionResultOfT as an enumerable collection. + /// A continuation token indicating where to resume paging. + /// The number of items per page. + /// The pages of ContentUnderstandingClientGetAnalyzersCollectionResultOfT as an enumerable collection. + public override IEnumerable> AsPages(string continuationToken, int? pageSizeHint) + { + Uri nextPage = continuationToken != null ? new Uri(continuationToken) : null; + while (true) + { + Response response = GetNextResponse(pageSizeHint, nextPage); + if (response is null) + { + yield break; + } + PagedContentAnalyzer result = (PagedContentAnalyzer)response; + yield return Page.FromValues((IReadOnlyList)result.Value, nextPage?.AbsoluteUri, response); + nextPage = result.NextLink; + if (nextPage == null) + { + yield break; + } + } + } + + /// Get next page. + /// The number of items per page. + /// The next link to use for the next page of results. + private Response GetNextResponse(int? pageSizeHint, Uri nextLink) + { + HttpMessage message = nextLink != null ? _client.CreateNextGetAnalyzersRequest(nextLink, _context) : _client.CreateGetAnalyzersRequest(_context); + using DiagnosticScope scope = _client.ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetAnalyzers"); + scope.Start(); + try + { + return _client.Pipeline.ProcessMessage(message, _context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClient.RestClient.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClient.RestClient.cs new file mode 100644 index 000000000000..60ac55653364 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClient.RestClient.cs @@ -0,0 +1,304 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using Azure; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + /// + public partial class ContentUnderstandingClient + { + private static ResponseClassifier _pipelineMessageClassifier200; + private static ResponseClassifier _pipelineMessageClassifier200201; + private static ResponseClassifier _pipelineMessageClassifier202; + private static ResponseClassifier _pipelineMessageClassifier204; + + private static ResponseClassifier PipelineMessageClassifier200 => _pipelineMessageClassifier200 = new StatusCodeClassifier(stackalloc ushort[] { 200 }); + + private static ResponseClassifier PipelineMessageClassifier200201 => _pipelineMessageClassifier200201 = new StatusCodeClassifier(stackalloc ushort[] { 200, 201 }); + + private static ResponseClassifier PipelineMessageClassifier202 => _pipelineMessageClassifier202 = new StatusCodeClassifier(stackalloc ushort[] { 202 }); + + private static ResponseClassifier PipelineMessageClassifier204 => _pipelineMessageClassifier204 = new StatusCodeClassifier(stackalloc ushort[] { 204 }); + + internal HttpMessage CreateAnalyzeRequest(string analyzerId, RequestContent content, string stringEncoding, string processingLocation, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzers/", false); + uri.AppendPath(analyzerId, true); + uri.AppendPath(":analyze", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (stringEncoding != null) + { + uri.AppendQuery("stringEncoding", stringEncoding, true); + } + if (processingLocation != null) + { + uri.AppendQuery("processingLocation", processingLocation, true); + } + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier202); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Post; + request.Headers.SetValue("Content-Type", "application/json"); + request.Headers.SetValue("Accept", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateAnalyzeBinaryRequest(string analyzerId, string contentType, RequestContent content, string stringEncoding, string processingLocation, string inputRange, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzers/", false); + uri.AppendPath(analyzerId, true); + uri.AppendPath(":analyzeBinary", false); + uri.AppendQuery("api-version", _apiVersion, true); + if (stringEncoding != null) + { + uri.AppendQuery("stringEncoding", stringEncoding, true); + } + if (processingLocation != null) + { + uri.AppendQuery("processingLocation", processingLocation, true); + } + if (inputRange != null) + { + uri.AppendQuery("range", inputRange, true); + } + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier202); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Post; + request.Headers.SetValue("Content-Type", contentType); + request.Headers.SetValue("Accept", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateCreateAnalyzerRequest(string analyzerId, RequestContent content, bool? 
allowReplace, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzers/", false); + uri.AppendPath(analyzerId, true); + uri.AppendQuery("api-version", _apiVersion, true); + if (allowReplace != null) + { + uri.AppendQuery("allowReplace", TypeFormatters.ConvertToString(allowReplace), true); + } + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200201); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Put; + request.Headers.SetValue("Content-Type", "application/json"); + request.Headers.SetValue("Accept", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteAnalyzerRequest(string analyzerId, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzers/", false); + uri.AppendPath(analyzerId, true); + uri.AppendQuery("api-version", _apiVersion, true); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier204); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Delete; + return message; + } + + internal HttpMessage CreateDeleteResultRequest(string operationId, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzerResults/", false); + uri.AppendPath(operationId, true); + uri.AppendQuery("api-version", _apiVersion, true); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier204); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Delete; + return message; + } + + internal HttpMessage CreateGetAnalyzerRequest(string analyzerId, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzers/", false); + uri.AppendPath(analyzerId, true); + uri.AppendQuery("api-version", _apiVersion, true); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Get; + request.Headers.SetValue("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetDefaultsRequest(RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/defaults", false); + uri.AppendQuery("api-version", _apiVersion, true); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Get; + request.Headers.SetValue("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetOperationStatusRequest(string analyzerId, string operationId, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzers/", false); + uri.AppendPath(analyzerId, true); + uri.AppendPath("/operations/", false); + uri.AppendPath(operationId, true); + uri.AppendQuery("api-version", 
_apiVersion, true); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Get; + request.Headers.SetValue("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetResultRequest(string operationId, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzerResults/", false); + uri.AppendPath(operationId, true); + uri.AppendQuery("api-version", _apiVersion, true); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Get; + request.Headers.SetValue("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetResultFileRequest(string operationId, string path, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzerResults/", false); + uri.AppendPath(operationId, true); + uri.AppendPath("/files/", false); + uri.AppendPath(path, false); + uri.AppendQuery("api-version", _apiVersion, true); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Get; + request.Headers.SetValue("Accept", "*/*"); + return message; + } + + internal HttpMessage CreateGrantCopyAuthorizationRequest(string analyzerId, RequestContent content, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzers/", false); + uri.AppendPath(analyzerId, true); + uri.AppendPath(":grantCopyAuthorization", false); + uri.AppendQuery("api-version", _apiVersion, true); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Post; + request.Headers.SetValue("Content-Type", "application/json"); + request.Headers.SetValue("Accept", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetAnalyzersRequest(RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzers", false); + uri.AppendQuery("api-version", _apiVersion, true); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Get; + request.Headers.SetValue("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateNextGetAnalyzersRequest(Uri nextPage, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(nextPage); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Get; + request.Headers.SetValue("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateAnalyzerRequest(string analyzerId, RequestContent content, RequestContext context) + { + 
RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/analyzers/", false); + uri.AppendPath(analyzerId, true); + uri.AppendQuery("api-version", _apiVersion, true); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Patch; + request.Headers.SetValue("Content-Type", "application/merge-patch+json"); + request.Headers.SetValue("Accept", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateUpdateDefaultsRequest(RequestContent content, RequestContext context) + { + RawRequestUriBuilder uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/contentunderstanding", false); + uri.AppendPath("/defaults", false); + uri.AppendQuery("api-version", _apiVersion, true); + HttpMessage message = Pipeline.CreateMessage(context, PipelineMessageClassifier200); + Request request = message.Request; + request.Uri = uri; + request.Method = RequestMethod.Patch; + request.Headers.SetValue("Content-Type", "application/merge-patch+json"); + request.Headers.SetValue("Accept", "application/json"); + request.Content = content; + return message; + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClient.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClient.cs new file mode 100644 index 000000000000..5a356b80ce98 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClient.cs @@ -0,0 +1,1296 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.ContentUnderstanding +{ + /// The ContentUnderstandingClient. + public partial class ContentUnderstandingClient + { + private readonly Uri _endpoint; + /// A credential used to authenticate to the service. + private readonly AzureKeyCredential _keyCredential; + private const string AuthorizationHeader = "Ocp-Apim-Subscription-Key"; + /// A credential used to authenticate to the service. + private readonly TokenCredential _tokenCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" }; + private readonly string _apiVersion; + + /// Initializes a new instance of ContentUnderstandingClient for mocking. + protected ContentUnderstandingClient() + { + } + + /// Initializes a new instance of ContentUnderstandingClient. + /// Service endpoint. + /// A credential used to authenticate to the service. + /// or is null. + public ContentUnderstandingClient(Uri endpoint, AzureKeyCredential credential) : this(endpoint, credential, new ContentUnderstandingClientOptions()) + { + } + + /// Initializes a new instance of ContentUnderstandingClient. + /// Service endpoint. + /// A credential used to authenticate to the service. + /// or is null. + public ContentUnderstandingClient(Uri endpoint, TokenCredential credential) : this(endpoint, credential, new ContentUnderstandingClientOptions()) + { + } + + /// Initializes a new instance of ContentUnderstandingClient. + /// Service endpoint. + /// A credential used to authenticate to the service. 
+ /// The options for configuring the client. + /// or is null. + public ContentUnderstandingClient(Uri endpoint, AzureKeyCredential credential, ContentUnderstandingClientOptions options) + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + + options ??= new ContentUnderstandingClientOptions(); + + _endpoint = endpoint; + _keyCredential = credential; + Pipeline = HttpPipelineBuilder.Build(options, new HttpPipelinePolicy[] { new AzureKeyCredentialPolicy(_keyCredential, AuthorizationHeader) }); + _apiVersion = options.Version; + ClientDiagnostics = new ClientDiagnostics(options, true); + } + + /// Initializes a new instance of ContentUnderstandingClient. + /// Service endpoint. + /// A credential used to authenticate to the service. + /// The options for configuring the client. + /// or is null. + public ContentUnderstandingClient(Uri endpoint, TokenCredential credential, ContentUnderstandingClientOptions options) + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + + options ??= new ContentUnderstandingClientOptions(); + + _endpoint = endpoint; + _tokenCredential = credential; + Pipeline = HttpPipelineBuilder.Build(options, new HttpPipelinePolicy[] { new BearerTokenAuthenticationPolicy(_tokenCredential, AuthorizationScopes) }); + _apiVersion = options.Version; + ClientDiagnostics = new ClientDiagnostics(options, true); + } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline { get; } + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// Extract content and fields from input. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// The content to send as the body of the request. + /// + /// The string encoding format for content spans in the response. + /// Possible values are 'codePoint', 'utf16', and `utf8`. Default is `codePoint`.") + /// + /// The location where the data may be processed. Defaults to global. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// The response returned from the service. + public virtual Operation Analyze(WaitUntil waitUntil, string analyzerId, RequestContent content, string stringEncoding = default, string processingLocation = default, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.Analyze"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateAnalyzeRequest(analyzerId, content, stringEncoding, processingLocation, context); + return ProtocolOperationHelpers.ProcessMessage(Pipeline, message, ClientDiagnostics, "ContentUnderstandingClient.Analyze", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Extract content and fields from input. 
+ /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// The content to send as the body of the request. + /// + /// The string encoding format for content spans in the response. + /// Possible values are 'codePoint', 'utf16', and `utf8`. Default is `codePoint`.") + /// + /// The location where the data may be processed. Defaults to global. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// The response returned from the service. + public virtual async Task> AnalyzeAsync(WaitUntil waitUntil, string analyzerId, RequestContent content, string stringEncoding = default, string processingLocation = default, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.Analyze"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateAnalyzeRequest(analyzerId, content, stringEncoding, processingLocation, context); + return await ProtocolOperationHelpers.ProcessMessageAsync(Pipeline, message, ClientDiagnostics, "ContentUnderstandingClient.AnalyzeAsync", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Extract content and fields from input. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// Request content type. + /// The content to send as the body of the request. + /// + /// The string encoding format for content spans in the response. + /// Possible values are 'codePoint', 'utf16', and `utf8`. Default is `codePoint`.") + /// + /// The location where the data may be processed. Defaults to global. + /// Range of the input to analyze (ex. `1-3,5,9-`). Document content uses 1-based page numbers, while audio visual content uses integer milliseconds. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// , or is null. + /// or is an empty string, and was expected to be non-empty. + /// The response returned from the service. 
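+ /// A hypothetical protocol-level call (the analyzer ID and content type are illustrative); most callers can use the
+ /// convenience overload that accepts BinaryData instead:
+ /// <code>
+ /// var operation = client.AnalyzeBinary(
+ ///     WaitUntil.Completed,
+ ///     "myDocumentAnalyzer",
+ ///     "application/pdf",
+ ///     RequestContent.Create(File.ReadAllBytes("invoice.pdf")));
+ /// BinaryData rawResult = operation.Value;
+ /// </code>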
+ public virtual Operation AnalyzeBinary(WaitUntil waitUntil, string analyzerId, string contentType, RequestContent content, string stringEncoding = default, string processingLocation = default, string inputRange = default, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.AnalyzeBinary"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNullOrEmpty(contentType, nameof(contentType)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateAnalyzeBinaryRequest(analyzerId, contentType, content, stringEncoding, processingLocation, inputRange, context); + return ProtocolOperationHelpers.ProcessMessage(Pipeline, message, ClientDiagnostics, "ContentUnderstandingClient.AnalyzeBinary", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Extract content and fields from input. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// Request content type. + /// The content to send as the body of the request. + /// + /// The string encoding format for content spans in the response. + /// Possible values are 'codePoint', 'utf16', and `utf8`. Default is `codePoint`.") + /// + /// The location where the data may be processed. Defaults to global. + /// Range of the input to analyze (ex. `1-3,5,9-`). Document content uses 1-based page numbers, while audio visual content uses integer milliseconds. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// , or is null. + /// or is an empty string, and was expected to be non-empty. + /// The response returned from the service. + public virtual async Task> AnalyzeBinaryAsync(WaitUntil waitUntil, string analyzerId, string contentType, RequestContent content, string stringEncoding = default, string processingLocation = default, string inputRange = default, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.AnalyzeBinary"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNullOrEmpty(contentType, nameof(contentType)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateAnalyzeBinaryRequest(analyzerId, contentType, content, stringEncoding, processingLocation, inputRange, context); + return await ProtocolOperationHelpers.ProcessMessageAsync(Pipeline, message, ClientDiagnostics, "ContentUnderstandingClient.AnalyzeBinaryAsync", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Create a copy of the source analyzer to the current location. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// The content to send as the body of the request. 
+ /// Allow the operation to replace an existing resource. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// The response returned from the service. + public virtual Operation CopyAnalyzer(WaitUntil waitUntil, string analyzerId, RequestContent content, bool? allowReplace = default, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.CopyAnalyzer"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateCopyAnalyzerRequest(analyzerId, content, allowReplace, context); + return ProtocolOperationHelpers.ProcessMessage(Pipeline, message, ClientDiagnostics, "ContentUnderstandingClient.CopyAnalyzer", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Create a copy of the source analyzer to the current location. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// The content to send as the body of the request. + /// Allow the operation to replace an existing resource. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// The response returned from the service. + public virtual async Task> CopyAnalyzerAsync(WaitUntil waitUntil, string analyzerId, RequestContent content, bool? allowReplace = default, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.CopyAnalyzer"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateCopyAnalyzerRequest(analyzerId, content, allowReplace, context); + return await ProtocolOperationHelpers.ProcessMessageAsync(Pipeline, message, ClientDiagnostics, "ContentUnderstandingClient.CopyAnalyzerAsync", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Create a copy of the source analyzer to the current location. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// Source analyzer ID. + /// Azure resource ID of the source analyzer location. Defaults to the current resource. + /// Azure region of the source analyzer location. Defaults to current region. + /// Allow the operation to replace an existing resource. + /// The cancellation token that can be used to cancel the operation. + /// or is null. + /// or is an empty string, and was expected to be non-empty. 
+ public virtual Operation CopyAnalyzer(WaitUntil waitUntil, string analyzerId, string sourceAnalyzerId, string sourceAzureResourceId = default, string sourceRegion = default, bool? allowReplace = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNullOrEmpty(sourceAnalyzerId, nameof(sourceAnalyzerId)); + + CopyAnalyzerRequest spreadModel = new CopyAnalyzerRequest(sourceAzureResourceId, sourceRegion, sourceAnalyzerId, default); + Operation result = CopyAnalyzer(waitUntil, analyzerId, spreadModel, allowReplace, cancellationToken.ToRequestContext()); + return ProtocolOperationHelpers.Convert(result, response => ContentAnalyzer.FromLroResponse(response), ClientDiagnostics, "ContentUnderstandingClient.CopyAnalyzer"); + } + + /// Create a copy of the source analyzer to the current location. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// Source analyzer ID. + /// Azure resource ID of the source analyzer location. Defaults to the current resource. + /// Azure region of the source analyzer location. Defaults to current region. + /// Allow the operation to replace an existing resource. + /// The cancellation token that can be used to cancel the operation. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + public virtual async Task> CopyAnalyzerAsync(WaitUntil waitUntil, string analyzerId, string sourceAnalyzerId, string sourceAzureResourceId = default, string sourceRegion = default, bool? allowReplace = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNullOrEmpty(sourceAnalyzerId, nameof(sourceAnalyzerId)); + + CopyAnalyzerRequest spreadModel = new CopyAnalyzerRequest(sourceAzureResourceId, sourceRegion, sourceAnalyzerId, default); + Operation result = await CopyAnalyzerAsync(waitUntil, analyzerId, spreadModel, allowReplace, cancellationToken.ToRequestContext()).ConfigureAwait(false); + return ProtocolOperationHelpers.Convert(result, response => ContentAnalyzer.FromLroResponse(response), ClientDiagnostics, "ContentUnderstandingClient.CopyAnalyzerAsync"); + } + + /// Create a new analyzer asynchronously. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// The content to send as the body of the request. + /// Allow the operation to replace an existing resource. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// The response returned from the service. + public virtual Operation CreateAnalyzer(WaitUntil waitUntil, string analyzerId, RequestContent content, bool? 
allowReplace = default, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.CreateAnalyzer"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateCreateAnalyzerRequest(analyzerId, content, allowReplace, context); + return ProtocolOperationHelpers.ProcessMessage(Pipeline, message, ClientDiagnostics, "ContentUnderstandingClient.CreateAnalyzer", OperationFinalStateVia.OriginalUri, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Create a new analyzer asynchronously. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// The content to send as the body of the request. + /// Allow the operation to replace an existing resource. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// The response returned from the service. + public virtual async Task> CreateAnalyzerAsync(WaitUntil waitUntil, string analyzerId, RequestContent content, bool? allowReplace = default, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.CreateAnalyzer"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateCreateAnalyzerRequest(analyzerId, content, allowReplace, context); + return await ProtocolOperationHelpers.ProcessMessageAsync(Pipeline, message, ClientDiagnostics, "ContentUnderstandingClient.CreateAnalyzerAsync", OperationFinalStateVia.OriginalUri, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Create a new analyzer asynchronously. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// The resource instance. + /// Allow the operation to replace an existing resource. + /// The cancellation token that can be used to cancel the operation. + /// or is null. + /// is an empty string, and was expected to be non-empty. + public virtual Operation CreateAnalyzer(WaitUntil waitUntil, string analyzerId, ContentAnalyzer resource, bool? allowReplace = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(resource, nameof(resource)); + + Operation result = CreateAnalyzer(waitUntil, analyzerId, resource, allowReplace, cancellationToken.ToRequestContext()); + return ProtocolOperationHelpers.Convert(result, response => ContentAnalyzer.FromLroResponse(response), ClientDiagnostics, "ContentUnderstandingClient.CreateAnalyzer"); + } + + /// Create a new analyzer asynchronously. 
+ /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The unique identifier of the analyzer. + /// The resource instance. + /// Allow the operation to replace an existing resource. + /// The cancellation token that can be used to cancel the operation. + /// or is null. + /// is an empty string, and was expected to be non-empty. + public virtual async Task> CreateAnalyzerAsync(WaitUntil waitUntil, string analyzerId, ContentAnalyzer resource, bool? allowReplace = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(resource, nameof(resource)); + + Operation result = await CreateAnalyzerAsync(waitUntil, analyzerId, resource, allowReplace, cancellationToken.ToRequestContext()).ConfigureAwait(false); + return ProtocolOperationHelpers.Convert(result, response => ContentAnalyzer.FromLroResponse(response), ClientDiagnostics, "ContentUnderstandingClient.CreateAnalyzerAsync"); + } + + /// + /// [Protocol Method] Delete analyzer. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique identifier of the analyzer. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual Response DeleteAnalyzer(string analyzerId, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.DeleteAnalyzer"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + + using HttpMessage message = CreateDeleteAnalyzerRequest(analyzerId, context); + return Pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Delete analyzer. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique identifier of the analyzer. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual async Task DeleteAnalyzerAsync(string analyzerId, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.DeleteAnalyzer"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + + using HttpMessage message = CreateDeleteAnalyzerRequest(analyzerId, context); + return await Pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Delete analyzer. + /// The unique identifier of the analyzer. + /// The cancellation token that can be used to cancel the operation. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. 
+ public virtual Response DeleteAnalyzer(string analyzerId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + + return DeleteAnalyzer(analyzerId, cancellationToken.ToRequestContext()); + } + + /// Delete analyzer. + /// The unique identifier of the analyzer. + /// The cancellation token that can be used to cancel the operation. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + public virtual async Task DeleteAnalyzerAsync(string analyzerId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + + return await DeleteAnalyzerAsync(analyzerId, cancellationToken.ToRequestContext()).ConfigureAwait(false); + } + + /// + /// [Protocol Method] Mark the result of an analysis operation for deletion. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// Operation identifier. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual Response DeleteResult(string operationId, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.DeleteResult"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(operationId, nameof(operationId)); + + using HttpMessage message = CreateDeleteResultRequest(operationId, context); + return Pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Mark the result of an analysis operation for deletion. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// Operation identifier. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual async Task DeleteResultAsync(string operationId, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.DeleteResult"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(operationId, nameof(operationId)); + + using HttpMessage message = CreateDeleteResultRequest(operationId, context); + return await Pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Mark the result of an analysis operation for deletion. + /// Operation identifier. + /// The cancellation token that can be used to cancel the operation. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + public virtual Response DeleteResult(string operationId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(operationId, nameof(operationId)); + + return DeleteResult(operationId, cancellationToken.ToRequestContext()); + } + + /// Mark the result of an analysis operation for deletion. 
+ /// Operation identifier. + /// The cancellation token that can be used to cancel the operation. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + public virtual async Task DeleteResultAsync(string operationId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(operationId, nameof(operationId)); + + return await DeleteResultAsync(operationId, cancellationToken.ToRequestContext()).ConfigureAwait(false); + } + + /// + /// [Protocol Method] Get analyzer properties. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique identifier of the analyzer. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual Response GetAnalyzer(string analyzerId, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetAnalyzer"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + + using HttpMessage message = CreateGetAnalyzerRequest(analyzerId, context); + return Pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Get analyzer properties. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique identifier of the analyzer. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual async Task GetAnalyzerAsync(string analyzerId, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetAnalyzer"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + + using HttpMessage message = CreateGetAnalyzerRequest(analyzerId, context); + return await Pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Get analyzer properties. + /// The unique identifier of the analyzer. + /// The cancellation token that can be used to cancel the operation. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + public virtual Response GetAnalyzer(string analyzerId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + + Response result = GetAnalyzer(analyzerId, cancellationToken.ToRequestContext()); + return Response.FromValue((ContentAnalyzer)result, result); + } + + /// Get analyzer properties. + /// The unique identifier of the analyzer. + /// The cancellation token that can be used to cancel the operation. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. 
+ public virtual async Task> GetAnalyzerAsync(string analyzerId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + + Response result = await GetAnalyzerAsync(analyzerId, cancellationToken.ToRequestContext()).ConfigureAwait(false); + return Response.FromValue((ContentAnalyzer)result, result); + } + + /// + /// [Protocol Method] Return default settings for this Content Understanding resource. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual Response GetDefaults(RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetDefaults"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDefaultsRequest(context); + return Pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Return default settings for this Content Understanding resource. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual async Task GetDefaultsAsync(RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetDefaults"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDefaultsRequest(context); + return await Pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Return default settings for this Content Understanding resource. + /// The cancellation token that can be used to cancel the operation. + /// Service returned a non-success status code. + public virtual Response GetDefaults(CancellationToken cancellationToken = default) + { + Response result = GetDefaults(cancellationToken.ToRequestContext()); + return Response.FromValue((ContentUnderstandingDefaults)result, result); + } + + /// Return default settings for this Content Understanding resource. + /// The cancellation token that can be used to cancel the operation. + /// Service returned a non-success status code. + public virtual async Task> GetDefaultsAsync(CancellationToken cancellationToken = default) + { + Response result = await GetDefaultsAsync(cancellationToken.ToRequestContext()).ConfigureAwait(false); + return Response.FromValue((ContentUnderstandingDefaults)result, result); + } + + /// + /// [Protocol Method] Get the status of an analyzer creation operation. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique identifier of the analyzer. + /// The unique ID of the operation. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ internal virtual Response GetOperationStatus(string analyzerId, string operationId, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetOperationStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetOperationStatusRequest(analyzerId, operationId, context); + return Pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Get the status of an analyzer creation operation. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique identifier of the analyzer. + /// The unique ID of the operation. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task GetOperationStatusAsync(string analyzerId, string operationId, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetOperationStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetOperationStatusRequest(analyzerId, operationId, context); + return await Pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Get the status of an analyzer creation operation. + /// The unique identifier of the analyzer. + /// The unique ID of the operation. + /// The cancellation token that can be used to cancel the operation. + /// Service returned a non-success status code. + internal virtual Response GetOperationStatus(string analyzerId, string operationId, CancellationToken cancellationToken = default) + { + Response result = GetOperationStatus(analyzerId, operationId, cancellationToken.ToRequestContext()); + return Response.FromValue((ContentAnalyzerOperationStatus)result, result); + } + + /// Get the status of an analyzer creation operation. + /// The unique identifier of the analyzer. + /// The unique ID of the operation. + /// The cancellation token that can be used to cancel the operation. + /// Service returned a non-success status code. + internal virtual async Task> GetOperationStatusAsync(string analyzerId, string operationId, CancellationToken cancellationToken = default) + { + Response result = await GetOperationStatusAsync(analyzerId, operationId, cancellationToken.ToRequestContext()).ConfigureAwait(false); + return Response.FromValue((ContentAnalyzerOperationStatus)result, result); + } + + /// + /// [Protocol Method] Get the result of an analysis operation. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique ID of the operation. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ internal virtual Response GetResult(string operationId, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetResult"); + scope.Start(); + try + { + using HttpMessage message = CreateGetResultRequest(operationId, context); + return Pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Get the result of an analysis operation. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique ID of the operation. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task GetResultAsync(string operationId, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetResult"); + scope.Start(); + try + { + using HttpMessage message = CreateGetResultRequest(operationId, context); + return await Pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Get the result of an analysis operation. + /// The unique ID of the operation. + /// The cancellation token that can be used to cancel the operation. + /// Service returned a non-success status code. + internal virtual Response GetResult(string operationId, CancellationToken cancellationToken = default) + { + Response result = GetResult(operationId, cancellationToken.ToRequestContext()); + return Response.FromValue((ContentAnalyzerAnalyzeOperationStatus)result, result); + } + + /// Get the result of an analysis operation. + /// The unique ID of the operation. + /// The cancellation token that can be used to cancel the operation. + /// Service returned a non-success status code. + internal virtual async Task> GetResultAsync(string operationId, CancellationToken cancellationToken = default) + { + Response result = await GetResultAsync(operationId, cancellationToken.ToRequestContext()).ConfigureAwait(false); + return Response.FromValue((ContentAnalyzerAnalyzeOperationStatus)result, result); + } + + /// + /// [Protocol Method] Get a file associated with the result of an analysis operation. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// Operation identifier. + /// File path. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ public virtual Response GetResultFile(string operationId, string path, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetResultFile"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(operationId, nameof(operationId)); + Argument.AssertNotNullOrEmpty(path, nameof(path)); + + using HttpMessage message = CreateGetResultFileRequest(operationId, path, context); + return Pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Get a file associated with the result of an analysis operation. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// Operation identifier. + /// File path. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual async Task GetResultFileAsync(string operationId, string path, RequestContext context) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GetResultFile"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(operationId, nameof(operationId)); + Argument.AssertNotNullOrEmpty(path, nameof(path)); + + using HttpMessage message = CreateGetResultFileRequest(operationId, path, context); + return await Pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Get a file associated with the result of an analysis operation. + /// Operation identifier. + /// File path. + /// The cancellation token that can be used to cancel the operation. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + public virtual Response GetResultFile(string operationId, string path, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(operationId, nameof(operationId)); + Argument.AssertNotNullOrEmpty(path, nameof(path)); + + Response result = GetResultFile(operationId, path, cancellationToken.ToRequestContext()); + return Response.FromValue(result.Content, result); + } + + /// Get a file associated with the result of an analysis operation. + /// Operation identifier. + /// File path. + /// The cancellation token that can be used to cancel the operation. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + public virtual async Task> GetResultFileAsync(string operationId, string path, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(operationId, nameof(operationId)); + Argument.AssertNotNullOrEmpty(path, nameof(path)); + + Response result = await GetResultFileAsync(operationId, path, cancellationToken.ToRequestContext()).ConfigureAwait(false); + return Response.FromValue(result.Content, result); + } + + /// + /// [Protocol Method] Get authorization for copying this analyzer to another location. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique identifier of the analyzer. 
+ /// The content to send as the body of the request. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual Response GrantCopyAuthorization(string analyzerId, RequestContent content, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GrantCopyAuthorization"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateGrantCopyAuthorizationRequest(analyzerId, content, context); + return Pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Get authorization for copying this analyzer to another location. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique identifier of the analyzer. + /// The content to send as the body of the request. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual async Task GrantCopyAuthorizationAsync(string analyzerId, RequestContent content, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.GrantCopyAuthorization"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateGrantCopyAuthorizationRequest(analyzerId, content, context); + return await Pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Get authorization for copying this analyzer to another location. + /// The unique identifier of the analyzer. + /// Azure resource ID of the target analyzer location. + /// Azure region of the target analyzer location. Defaults to current region. + /// The cancellation token that can be used to cancel the operation. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + public virtual Response GrantCopyAuthorization(string analyzerId, string targetAzureResourceId, string targetRegion = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNullOrEmpty(targetAzureResourceId, nameof(targetAzureResourceId)); + + GrantCopyAuthorizationRequest1 spreadModel = new GrantCopyAuthorizationRequest1(targetAzureResourceId, targetRegion, default); + Response result = GrantCopyAuthorization(analyzerId, spreadModel, cancellationToken.ToRequestContext()); + return Response.FromValue((CopyAuthorization)result, result); + } + + /// Get authorization for copying this analyzer to another location. + /// The unique identifier of the analyzer. + /// Azure resource ID of the target analyzer location. + /// Azure region of the target analyzer location. 
Defaults to current region. + /// The cancellation token that can be used to cancel the operation. + /// or is null. + /// or is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + public virtual async Task> GrantCopyAuthorizationAsync(string analyzerId, string targetAzureResourceId, string targetRegion = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNullOrEmpty(targetAzureResourceId, nameof(targetAzureResourceId)); + + GrantCopyAuthorizationRequest1 spreadModel = new GrantCopyAuthorizationRequest1(targetAzureResourceId, targetRegion, default); + Response result = await GrantCopyAuthorizationAsync(analyzerId, spreadModel, cancellationToken.ToRequestContext()).ConfigureAwait(false); + return Response.FromValue((CopyAuthorization)result, result); + } + + /// + /// [Protocol Method] List analyzers. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual Pageable GetAnalyzers(RequestContext context) + { + return new ContentUnderstandingClientGetAnalyzersCollectionResult(this, context); + } + + /// + /// [Protocol Method] List analyzers. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual AsyncPageable GetAnalyzersAsync(RequestContext context) + { + return new ContentUnderstandingClientGetAnalyzersAsyncCollectionResult(this, context); + } + + /// List analyzers. + /// The cancellation token that can be used to cancel the operation. + /// Service returned a non-success status code. + public virtual Pageable GetAnalyzers(CancellationToken cancellationToken = default) + { + return new ContentUnderstandingClientGetAnalyzersCollectionResultOfT(this, cancellationToken.ToRequestContext()); + } + + /// List analyzers. + /// The cancellation token that can be used to cancel the operation. + /// Service returned a non-success status code. + public virtual AsyncPageable GetAnalyzersAsync(CancellationToken cancellationToken = default) + { + return new ContentUnderstandingClientGetAnalyzersAsyncCollectionResultOfT(this, cancellationToken.ToRequestContext()); + } + + /// + /// [Protocol Method] Update analyzer properties. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique identifier of the analyzer. + /// The content to send as the body of the request. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ public virtual Response UpdateAnalyzer(string analyzerId, RequestContent content, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.UpdateAnalyzer"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateUpdateAnalyzerRequest(analyzerId, content, context); + return Pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update analyzer properties. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The unique identifier of the analyzer. + /// The content to send as the body of the request. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual async Task UpdateAnalyzerAsync(string analyzerId, RequestContent content, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.UpdateAnalyzer"); + scope.Start(); + try + { + Argument.AssertNotNullOrEmpty(analyzerId, nameof(analyzerId)); + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateUpdateAnalyzerRequest(analyzerId, content, context); + return await Pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Return default settings for this Content Understanding resource. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The content to send as the body of the request. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + public virtual Response UpdateDefaults(RequestContent content, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.UpdateDefaults"); + scope.Start(); + try + { + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateUpdateDefaultsRequest(content, context); + return Pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Return default settings for this Content Understanding resource. + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// The content to send as the body of the request. + /// The request options, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ public virtual async Task UpdateDefaultsAsync(RequestContent content, RequestContext context = null) + { + using DiagnosticScope scope = ClientDiagnostics.CreateScope("ContentUnderstandingClient.UpdateDefaults"); + scope.Start(); + try + { + Argument.AssertNotNull(content, nameof(content)); + + using HttpMessage message = CreateUpdateDefaultsRequest(content, context); + return await Pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClientBuilderExtensions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClientBuilderExtensions.cs new file mode 100644 index 000000000000..f61dbb92271b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClientBuilderExtensions.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Diagnostics.CodeAnalysis; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.Core.Extensions; + +namespace Microsoft.Extensions.Azure +{ + /// Extension methods to add clients to . + public static partial class ContentUnderstandingClientBuilderExtensions + { + /// Registers a client with the specified . + /// The builder to register with. + /// Service endpoint. + /// A credential used to authenticate to the service. + /// or is null. + public static IAzureClientBuilder AddContentUnderstandingClient(this TBuilder builder, Uri endpoint, AzureKeyCredential credential) + where TBuilder : IAzureClientFactoryBuilder + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + + return builder.RegisterClientFactory(options => new ContentUnderstandingClient(endpoint, credential, options)); + } + + /// Registers a client with the specified . + /// The builder to register with. + /// Service endpoint. + /// is null. + public static IAzureClientBuilder AddContentUnderstandingClient(this TBuilder builder, Uri endpoint) + where TBuilder : IAzureClientFactoryBuilderWithCredential + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + + return builder.RegisterClientFactory((options, credential) => new ContentUnderstandingClient(endpoint, credential, options)); + } + + /// Registers a client with the specified . + /// The builder to register with. + /// The configuration to use for the client. 
+ [RequiresUnreferencedCode("Requires unreferenced code until we opt into EnableConfigurationBindingGenerator.")] + [RequiresDynamicCode("Requires unreferenced code until we opt into EnableConfigurationBindingGenerator.")] + public static IAzureClientBuilder AddContentUnderstandingClient(this TBuilder builder, TConfiguration configuration) + where TBuilder : IAzureClientFactoryBuilderWithConfiguration + { + return builder.RegisterClientFactory(configuration); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClientOptions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClientOptions.cs new file mode 100644 index 000000000000..8c58f55ba02b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingClientOptions.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + /// Client options for . + public partial class ContentUnderstandingClientOptions : ClientOptions + { + private const ServiceVersion LatestVersion = ServiceVersion.V2025_11_01; + + /// Initializes a new instance of ContentUnderstandingClientOptions. + /// The service version. + public ContentUnderstandingClientOptions(ServiceVersion version = LatestVersion) + { + Version = version switch + { + ServiceVersion.V2025_11_01 => "2025-11-01", + _ => throw new NotSupportedException() + }; + } + + /// Gets the Version. + internal string Version { get; } + + /// The version of the service to use. + public enum ServiceVersion + { + /// The 2025-11-01 version of the Content Understanding service. + V2025_11_01 = 1 + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingModelFactory.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingModelFactory.cs new file mode 100644 index 000000000000..ecff2361c97f --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/ContentUnderstandingModelFactory.cs @@ -0,0 +1,1030 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// A factory class for creating instances of the models for mocking. + public static partial class ContentUnderstandingModelFactory + { + /// Additional input to analyze. + /// The URL of the input to analyze. Only one of url or data should be specified. + /// Raw image bytes. Provide bytes-like object; do not base64-encode. Only one of url or data should be specified. + /// Name of the input. + /// The MIME type of the input content. Ex. application/pdf, image/jpeg, etc. + /// Range of the input to analyze (ex. `1-3,5,9-`). Document content uses 1-based page numbers, while audio visual content uses integer milliseconds. + /// A new instance for mocking. + public static AnalyzeInput AnalyzeInput(Uri url = default, BinaryData data = default, string name = default, string mimeType = default, string inputRange = default) + { + return new AnalyzeInput( + url, + data, + name, + mimeType, + inputRange, + additionalBinaryDataProperties: null); + } + + /// Analyze operation result. + /// The unique identifier of the analyzer. 
+ /// The version of the API used to analyze the document. + /// The date and time when the result was created. + /// Warnings encountered while analyzing the document. + /// + /// The string encoding format for content spans in the response. + /// Possible values are 'codePoint', 'utf16', and `utf8`. Default is `codePoint`.") + /// + /// The extracted content. + /// A new instance for mocking. + public static AnalyzeResult AnalyzeResult(string analyzerId = default, string apiVersion = default, DateTimeOffset? createdAt = default, IEnumerable warnings = default, string stringEncoding = default, IEnumerable contents = default) + { + warnings ??= new ChangeTrackingList(); + contents ??= new ChangeTrackingList(); + + return new AnalyzeResult( + analyzerId, + apiVersion, + createdAt, + warnings.ToList(), + stringEncoding, + contents.ToList(), + additionalBinaryDataProperties: null); + } + + /// + /// Media content base class. + /// Please note this is the abstract base class. The derived classes available for instantiation are: and . + /// + /// Content kind. + /// Detected MIME type of the content. Ex. application/pdf, image/jpeg, etc. + /// The analyzer that generated this content. + /// Classified content category. + /// The path of the content in the input. + /// Markdown representation of the content. + /// Extracted fields from the content. + /// A new instance for mocking. + public static MediaContent MediaContent(string kind = default, string mimeType = default, string analyzerId = default, string category = default, string path = default, string markdown = default, IDictionary fields = default) + { + fields ??= new ChangeTrackingDictionary(); + + return new UnknownMediaContent( + new MediaContentKind(kind), + mimeType, + analyzerId, + category, + path, + markdown, + fields, + additionalBinaryDataProperties: null); + } + + /// + /// Field extracted from the content. + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , , , , , , and . + /// + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// A new instance for mocking. + public static ContentField ContentField(string @type = default, IEnumerable spans = default, float? confidence = default, string source = default) + { + spans ??= new ChangeTrackingList(); + + return new UnknownContentField(new ContentFieldType(@type), spans.ToList(), confidence, source, additionalBinaryDataProperties: null); + } + + /// Position of the element in markdown, specified as a character offset and length. + /// Starting position (0-indexed) of the element in markdown, specified in characters. + /// Length of the element in markdown, specified in characters. + /// A new instance for mocking. + public static ContentSpan ContentSpan(int offset = default, int length = default) + { + return new ContentSpan(offset, length, additionalBinaryDataProperties: null); + } + + /// String field extracted from the content. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// String field value. + /// A new instance for mocking. + public static StringField StringField(IEnumerable spans = default, float? 
confidence = default, string source = default, string valueString = default) + { + spans ??= new ChangeTrackingList(); + + return new StringField( + ContentFieldType.String, + spans.ToList(), + confidence, + source, + additionalBinaryDataProperties: null, + "string", + valueString); + } + + /// Date field extracted from the content. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Date field value, in ISO 8601 (YYYY-MM-DD) format. + /// A new instance for mocking. + public static DateField DateField(IEnumerable spans = default, float? confidence = default, string source = default, DateTimeOffset? valueDate = default) + { + spans ??= new ChangeTrackingList(); + + return new DateField( + ContentFieldType.Date, + spans.ToList(), + confidence, + source, + additionalBinaryDataProperties: null, + "date", + valueDate); + } + + /// Time field extracted from the content. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Time field value, in ISO 8601 (hh:mm:ss) format. + /// A new instance for mocking. + public static TimeField TimeField(IEnumerable spans = default, float? confidence = default, string source = default, TimeSpan? valueTime = default) + { + spans ??= new ChangeTrackingList(); + + return new TimeField( + ContentFieldType.Time, + spans.ToList(), + confidence, + source, + additionalBinaryDataProperties: null, + "time", + valueTime); + } + + /// Number field extracted from the content. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Number field value. + /// A new instance for mocking. + public static NumberField NumberField(IEnumerable spans = default, float? confidence = default, string source = default, double? valueNumber = default) + { + spans ??= new ChangeTrackingList(); + + return new NumberField( + ContentFieldType.Number, + spans.ToList(), + confidence, + source, + additionalBinaryDataProperties: null, + "number", + valueNumber); + } + + /// Integer field extracted from the content. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Integer field value. + /// A new instance for mocking. + public static IntegerField IntegerField(IEnumerable spans = default, float? confidence = default, string source = default, long? valueInteger = default) + { + spans ??= new ChangeTrackingList(); + + return new IntegerField( + ContentFieldType.Integer, + spans.ToList(), + confidence, + source, + additionalBinaryDataProperties: null, + "integer", + valueInteger); + } + + /// Boolean field extracted from the content. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Boolean field value. + /// A new instance for mocking. + public static BooleanField BooleanField(IEnumerable spans = default, float? confidence = default, string source = default, bool? 
valueBoolean = default) + { + spans ??= new ChangeTrackingList(); + + return new BooleanField( + ContentFieldType.Boolean, + spans.ToList(), + confidence, + source, + additionalBinaryDataProperties: null, + "boolean", + valueBoolean); + } + + /// Array field extracted from the content. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Array field value. + /// A new instance for mocking. + public static ArrayField ArrayField(IEnumerable spans = default, float? confidence = default, string source = default, IEnumerable valueArray = default) + { + spans ??= new ChangeTrackingList(); + valueArray ??= new ChangeTrackingList(); + + return new ArrayField( + ContentFieldType.Array, + spans.ToList(), + confidence, + source, + additionalBinaryDataProperties: null, + "array", + valueArray.ToList()); + } + + /// Object field extracted from the content. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Object field value. + /// A new instance for mocking. + public static ObjectField ObjectField(IEnumerable spans = default, float? confidence = default, string source = default, IDictionary valueObject = default) + { + spans ??= new ChangeTrackingList(); + valueObject ??= new ChangeTrackingDictionary(); + + return new ObjectField( + ContentFieldType.Object, + spans.ToList(), + confidence, + source, + additionalBinaryDataProperties: null, + "object", + valueObject); + } + + /// JSON field extracted from the content. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// JSON field value. + /// A new instance for mocking. + public static JsonField JsonField(IEnumerable spans = default, float? confidence = default, string source = default, BinaryData valueJson = default) + { + spans ??= new ChangeTrackingList(); + + return new JsonField( + ContentFieldType.Json, + spans.ToList(), + confidence, + source, + additionalBinaryDataProperties: null, + "json", + valueJson); + } + + /// Document content. Ex. text/plain, application/pdf, image/jpeg. + /// Detected MIME type of the content. Ex. application/pdf, image/jpeg, etc. + /// The analyzer that generated this content. + /// Classified content category. + /// The path of the content in the input. + /// Markdown representation of the content. + /// Extracted fields from the content. + /// Start page number (1-indexed) of the content. + /// End page number (1-indexed) of the content. + /// + /// Length unit used by the width, height, and source properties. + /// For images/tiff, the default unit is pixel. For PDF, the default unit is inch. + /// + /// List of pages in the document. + /// List of paragraphs in the document. Only if enableOcr and returnDetails are true. + /// List of sections in the document. Only if enableLayout and returnDetails are true. + /// List of tables in the document. Only if enableLayout and returnDetails are true. + /// List of figures in the document. Only if enableLayout and returnDetails are true. + /// List of annotations in the document. Only if enableAnnotations and returnDetails are true. + /// List of hyperlinks in the document. Only if returnDetails are true. 
+ /// List of detected content segments. Only if enableSegment is true. + /// A new instance for mocking. + public static DocumentContent DocumentContent(string mimeType = default, string analyzerId = default, string category = default, string path = default, string markdown = default, IDictionary fields = default, int startPageNumber = default, int endPageNumber = default, LengthUnit? unit = default, IEnumerable pages = default, IEnumerable paragraphs = default, IEnumerable sections = default, IEnumerable tables = default, IEnumerable figures = default, IEnumerable annotations = default, IEnumerable hyperlinks = default, IEnumerable segments = default) + { + fields ??= new ChangeTrackingDictionary(); + pages ??= new ChangeTrackingList(); + paragraphs ??= new ChangeTrackingList(); + sections ??= new ChangeTrackingList(); + tables ??= new ChangeTrackingList(); + figures ??= new ChangeTrackingList(); + annotations ??= new ChangeTrackingList(); + hyperlinks ??= new ChangeTrackingList(); + segments ??= new ChangeTrackingList(); + + return new DocumentContent( + MediaContentKind.Document, + mimeType, + analyzerId, + category, + path, + markdown, + fields, + additionalBinaryDataProperties: null, + startPageNumber, + endPageNumber, + unit, + pages.ToList(), + paragraphs.ToList(), + sections.ToList(), + tables.ToList(), + figures.ToList(), + annotations.ToList(), + hyperlinks.ToList(), + segments.ToList()); + } + + /// Content from a document page. + /// Page number (1-based). + /// Width of the page. + /// Height of the page. + /// Span(s) associated with the page in the markdown content. + /// + /// The general orientation of the content in clockwise direction, + /// measured in degrees between (-180, 180]. + /// Only if enableOcr is true. + /// + /// List of words in the page. Only if enableOcr and returnDetails are true. + /// List of lines in the page. Only if enableOcr and returnDetails are true. + /// List of barcodes in the page. Only if enableBarcode and returnDetails are true. + /// List of mathematical formulas in the page. Only if enableFormula and returnDetails are true. + /// A new instance for mocking. + public static DocumentPage DocumentPage(int pageNumber = default, float? width = default, float? height = default, IEnumerable spans = default, float? angle = default, IEnumerable words = default, IEnumerable lines = default, IEnumerable barcodes = default, IEnumerable formulas = default) + { + spans ??= new ChangeTrackingList(); + words ??= new ChangeTrackingList(); + lines ??= new ChangeTrackingList(); + barcodes ??= new ChangeTrackingList(); + formulas ??= new ChangeTrackingList(); + + return new DocumentPage( + pageNumber, + width, + height, + spans.ToList(), + angle, + words.ToList(), + lines.ToList(), + barcodes.ToList(), + formulas.ToList(), + additionalBinaryDataProperties: null); + } + + /// + /// Word in a document, consisting of a contiguous sequence of characters. + /// For non-space delimited languages, such as Chinese, Japanese, and Korean, + /// each character is represented as its own word. + /// + /// Word text. + /// Encoded source that identifies the position of the word in the content. + /// Span of the word in the markdown content. + /// Confidence of predicting the word. + /// A new instance for mocking. + public static DocumentWord DocumentWord(string content = default, string source = default, ContentSpan span = default, float? 
confidence = default) + { + return new DocumentWord(content, source, span, confidence, additionalBinaryDataProperties: null); + } + + /// Line in a document, consisting of an contiguous sequence of words. + /// Line text. + /// Encoded source that identifies the position of the line in the content. + /// Span of the line in the markdown content. + /// A new instance for mocking. + public static DocumentLine DocumentLine(string content = default, string source = default, ContentSpan span = default) + { + return new DocumentLine(content, source, span, additionalBinaryDataProperties: null); + } + + /// Barcode in a document. + /// Barcode kind. + /// Barcode value. + /// Encoded source that identifies the position of the barcode in the content. + /// Span of the barcode in the markdown content. + /// Confidence of predicting the barcode. + /// A new instance for mocking. + public static DocumentBarcode DocumentBarcode(DocumentBarcodeKind kind = default, string value = default, string source = default, ContentSpan span = default, float? confidence = default) + { + return new DocumentBarcode( + kind, + value, + source, + span, + confidence, + additionalBinaryDataProperties: null); + } + + /// Mathematical formula in a document. + /// Formula kind. + /// LaTex expression describing the formula. + /// Encoded source that identifies the position of the formula in the content. + /// Span of the formula in the markdown content. + /// Confidence of predicting the formula. + /// A new instance for mocking. + public static DocumentFormula DocumentFormula(DocumentFormulaKind kind = default, string value = default, string source = default, ContentSpan span = default, float? confidence = default) + { + return new DocumentFormula( + kind, + value, + source, + span, + confidence, + additionalBinaryDataProperties: null); + } + + /// + /// Paragraph in a document, generally consisting of an contiguous sequence of lines + /// with common alignment and spacing. + /// + /// Semantic role of the paragraph. + /// Paragraph text. + /// Encoded source that identifies the position of the paragraph in the content. + /// Span of the paragraph in the markdown content. + /// A new instance for mocking. + public static DocumentParagraph DocumentParagraph(SemanticRole? role = default, string content = default, string source = default, ContentSpan span = default) + { + return new DocumentParagraph(role, content, source, span, additionalBinaryDataProperties: null); + } + + /// Section in a document. + /// Span of the section in the markdown content. + /// Child elements of the section. + /// A new instance for mocking. + public static DocumentSection DocumentSection(ContentSpan span = default, IEnumerable elements = default) + { + elements ??= new ChangeTrackingList(); + + return new DocumentSection(span, elements.ToList(), additionalBinaryDataProperties: null); + } + + /// Table in a document, consisting table cells arranged in a rectangular layout. + /// Number of rows in the table. + /// Number of columns in the table. + /// Cells contained within the table. + /// Encoded source that identifies the position of the table in the content. + /// Span of the table in the markdown content. + /// Table caption. + /// List of table footnotes. + /// Semantic role of the table. + /// A new instance for mocking. 
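The table factories above return fully populated, read-only models so that unit tests can fabricate service output without calling the service. A minimal sketch of mocking a one-cell table follows; it assumes the enclosing generated class is named `ContentUnderstandingModelFactory` (the class declaration sits outside this hunk) and that the snippet runs inside a test method with `using Azure.AI.ContentUnderstanding;` in scope:

```C#
// Assumption: the generated factory class is ContentUnderstandingModelFactory;
// adjust the name to whatever the class declaration above this hunk actually uses.
DocumentTableCell cell = ContentUnderstandingModelFactory.DocumentTableCell(
    rowIndex: 0,
    columnIndex: 0,
    content: "Amount");

DocumentTable table = ContentUnderstandingModelFactory.DocumentTable(
    rowCount: 1,
    columnCount: 1,
    cells: new[] { cell },
    span: ContentUnderstandingModelFactory.ContentSpan(offset: 0, length: 6));
```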
+ public static DocumentTable DocumentTable(int rowCount = default, int columnCount = default, IEnumerable cells = default, string source = default, ContentSpan span = default, DocumentCaption caption = default, IEnumerable footnotes = default, SemanticRole? role = default) + { + cells ??= new ChangeTrackingList(); + footnotes ??= new ChangeTrackingList(); + + return new DocumentTable( + rowCount, + columnCount, + cells.ToList(), + source, + span, + caption, + footnotes.ToList(), + role, + additionalBinaryDataProperties: null); + } + + /// Table cell in a document table. + /// Table cell kind. + /// Row index of the cell. + /// Column index of the cell. + /// Number of rows spanned by this cell. + /// Number of columns spanned by this cell. + /// Content of the table cell. + /// Encoded source that identifies the position of the table cell in the content. + /// Span of the table cell in the markdown content. + /// Child elements of the table cell. + /// A new instance for mocking. + public static DocumentTableCell DocumentTableCell(DocumentTableCellKind? kind = default, int rowIndex = default, int columnIndex = default, int? rowSpan = default, int? columnSpan = default, string content = default, string source = default, ContentSpan span = default, IEnumerable elements = default) + { + elements ??= new ChangeTrackingList(); + + return new DocumentTableCell( + kind, + rowIndex, + columnIndex, + rowSpan, + columnSpan, + content, + source, + span, + elements.ToList(), + additionalBinaryDataProperties: null); + } + + /// Caption of a table or figure. + /// Content of the caption. + /// Encoded source that identifies the position of the caption in the content. + /// Span of the caption in the markdown content. + /// Child elements of the caption. + /// A new instance for mocking. + public static DocumentCaption DocumentCaption(string content = default, string source = default, ContentSpan span = default, IEnumerable elements = default) + { + elements ??= new ChangeTrackingList(); + + return new DocumentCaption(content, source, span, elements.ToList(), additionalBinaryDataProperties: null); + } + + /// Footnote of a table or figure. + /// Content of the footnote. + /// Encoded source that identifies the position of the footnote in the content. + /// Span of the footnote in the markdown content. + /// Child elements of the footnote. + /// A new instance for mocking. + public static DocumentFootnote DocumentFootnote(string content = default, string source = default, ContentSpan span = default, IEnumerable elements = default) + { + elements ??= new ChangeTrackingList(); + + return new DocumentFootnote(content, source, span, elements.ToList(), additionalBinaryDataProperties: null); + } + + /// + /// Figure in a document. + /// Please note this is the abstract base class. The derived classes available for instantiation are: and . + /// + /// Figure kind. + /// Figure identifier. + /// Encoded source that identifies the position of the figure in the content. + /// Span of the figure in the markdown content. + /// Child elements of the figure, excluding any caption or footnotes. + /// Figure caption. + /// List of figure footnotes. + /// Description of the figure. + /// Semantic role of the figure. + /// A new instance for mocking. 
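`DocumentFigure` is the abstract base; the chart and Mermaid overloads further down produce the concrete kinds and set the figure kind internally. A sketch of fabricating one of each for tests, under the same assumed `ContentUnderstandingModelFactory` class name:

```C#
// Each overload fixes the figure kind (Chart / Mermaid), so only the payload is supplied here.
DocumentFigure chart = ContentUnderstandingModelFactory.DocumentChartFigure(
    id: "figure-1",
    description: "Quarterly revenue by region");

DocumentFigure diagram = ContentUnderstandingModelFactory.DocumentMermaidFigure(
    id: "figure-2",
    content: "graph TD; A-->B;");
```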
+ public static DocumentFigure DocumentFigure(string kind = default, string id = default, string source = default, ContentSpan span = default, IEnumerable elements = default, DocumentCaption caption = default, IEnumerable footnotes = default, string description = default, SemanticRole? role = default) + { + elements ??= new ChangeTrackingList(); + footnotes ??= new ChangeTrackingList(); + + return new UnknownDocumentFigure( + new DocumentFigureKind(kind), + id, + source, + span, + elements.ToList(), + caption, + footnotes.ToList(), + description, + role, + additionalBinaryDataProperties: null); + } + + /// Figure containing a chart, such as a bar chart, line chart, or pie chart. + /// Figure identifier. + /// Encoded source that identifies the position of the figure in the content. + /// Span of the figure in the markdown content. + /// Child elements of the figure, excluding any caption or footnotes. + /// Figure caption. + /// List of figure footnotes. + /// Description of the figure. + /// Semantic role of the figure. + /// Chart content represented using [Chart.js config](https://www.chartjs.org/docs/latest/configuration/). + /// A new instance for mocking. + public static DocumentChartFigure DocumentChartFigure(string id = default, string source = default, ContentSpan span = default, IEnumerable elements = default, DocumentCaption caption = default, IEnumerable footnotes = default, string description = default, SemanticRole? role = default, IDictionary content = default) + { + elements ??= new ChangeTrackingList(); + footnotes ??= new ChangeTrackingList(); + content ??= new ChangeTrackingDictionary(); + + return new DocumentChartFigure( + DocumentFigureKind.Chart, + id, + source, + span, + elements.ToList(), + caption, + footnotes.ToList(), + description, + role, + additionalBinaryDataProperties: null, + content); + } + + /// Figure containing a diagram, such as a flowchart or network diagram. + /// Figure identifier. + /// Encoded source that identifies the position of the figure in the content. + /// Span of the figure in the markdown content. + /// Child elements of the figure, excluding any caption or footnotes. + /// Figure caption. + /// List of figure footnotes. + /// Description of the figure. + /// Semantic role of the figure. + /// Diagram content represented using [Mermaid syntax](https://mermaid.js.org/intro/). + /// A new instance for mocking. + public static DocumentMermaidFigure DocumentMermaidFigure(string id = default, string source = default, ContentSpan span = default, IEnumerable elements = default, DocumentCaption caption = default, IEnumerable footnotes = default, string description = default, SemanticRole? role = default, string content = default) + { + elements ??= new ChangeTrackingList(); + footnotes ??= new ChangeTrackingList(); + + return new DocumentMermaidFigure( + DocumentFigureKind.Mermaid, + id, + source, + span, + elements.ToList(), + caption, + footnotes.ToList(), + description, + role, + additionalBinaryDataProperties: null, + content); + } + + /// Annotation in a document, such as a strikethrough or a comment. + /// Annotation identifier. + /// Annotation kind. + /// Spans of the content associated with the annotation. + /// Position of the annotation. + /// Comments associated with the annotation. + /// Annotation author. + /// Date and time when the annotation was created. + /// Date and time when the annotation was last modified. + /// Tags associated with the annotation. + /// A new instance for mocking. 
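Annotations carry an author, timestamps, and threaded comments. A sketch of mocking one annotation with a single comment, assuming the same factory class name and `using System;` for `DateTimeOffset`:

```C#
DocumentAnnotationComment comment = ContentUnderstandingModelFactory.DocumentAnnotationComment(
    message: "Please verify the total.",
    author: "reviewer@contoso.com",
    createdAt: DateTimeOffset.UtcNow);

DocumentAnnotation annotation = ContentUnderstandingModelFactory.DocumentAnnotation(
    id: "annotation-1",
    author: "reviewer@contoso.com",
    comments: new[] { comment });
```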
+ public static DocumentAnnotation DocumentAnnotation(string id = default, DocumentAnnotationKind kind = default, IEnumerable spans = default, string source = default, IEnumerable comments = default, string author = default, DateTimeOffset? createdAt = default, DateTimeOffset? lastModifiedAt = default, IEnumerable tags = default) + { + spans ??= new ChangeTrackingList(); + comments ??= new ChangeTrackingList(); + tags ??= new ChangeTrackingList(); + + return new DocumentAnnotation( + id, + kind, + spans.ToList(), + source, + comments.ToList(), + author, + createdAt, + lastModifiedAt, + tags.ToList(), + additionalBinaryDataProperties: null); + } + + /// Comment associated with a document annotation. + /// Comment message in Markdown. + /// Author of the comment. + /// Date and time when the comment was created. + /// Date and time when the comment was last modified. + /// Tags associated with the comment. + /// A new instance for mocking. + public static DocumentAnnotationComment DocumentAnnotationComment(string message = default, string author = default, DateTimeOffset? createdAt = default, DateTimeOffset? lastModifiedAt = default, IEnumerable tags = default) + { + tags ??= new ChangeTrackingList(); + + return new DocumentAnnotationComment( + message, + author, + createdAt, + lastModifiedAt, + tags.ToList(), + additionalBinaryDataProperties: null); + } + + /// Hyperlink in a document, such as a link to a web page or an email address. + /// Hyperlinked content. + /// URL of the hyperlink. + /// Span of the hyperlink in the markdown content. + /// Position of the hyperlink. + /// A new instance for mocking. + public static DocumentHyperlink DocumentHyperlink(string content = default, string url = default, ContentSpan span = default, string source = default) + { + return new DocumentHyperlink(content, url, span, source, additionalBinaryDataProperties: null); + } + + /// Detected document content segment. + /// Segment identifier. + /// Classified content category. + /// Span of the segment in the markdown content. + /// Start page number (1-indexed) of the segment. + /// End page number (1-indexed) of the segment. + /// A new instance for mocking. + public static DocumentContentSegment DocumentContentSegment(string segmentId = default, string category = default, ContentSpan span = default, int startPageNumber = default, int endPageNumber = default) + { + return new DocumentContentSegment( + segmentId, + category, + span, + startPageNumber, + endPageNumber, + additionalBinaryDataProperties: null); + } + + /// Audio visual content. Ex. audio/wav, video/mp4. + /// Detected MIME type of the content. Ex. application/pdf, image/jpeg, etc. + /// The analyzer that generated this content. + /// Classified content category. + /// The path of the content in the input. + /// Markdown representation of the content. + /// Extracted fields from the content. + /// Start time of the content in milliseconds. + /// End time of the content in milliseconds. + /// Width of each video frame in pixels, if applicable. + /// Height of each video frame in pixels, if applicable. + /// List of camera shot changes in the video, represented by its timestamp in milliseconds. Only if returnDetails is true. + /// List of key frames in the video, represented by its timestamp in milliseconds. Only if returnDetails is true. + /// List of transcript phrases. Only if returnDetails is true. + /// List of detected content segments. Only if enableSegment is true. + /// A new instance for mocking. 
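Audio/visual results are timestamped in milliseconds and expose the transcript as phrases made up of words. A sketch of mocking a short clip with one transcript phrase, again under the assumed `ContentUnderstandingModelFactory` class name:

```C#
TranscriptPhrase phrase = ContentUnderstandingModelFactory.TranscriptPhrase(
    speaker: "Speaker 1",
    startTimeMs: 0,
    endTimeMs: 2500,
    locale: "en-US",
    text: "Welcome to the quarterly review.");

AudioVisualContent clip = ContentUnderstandingModelFactory.AudioVisualContent(
    mimeType: "audio/wav",
    startTimeMs: 0,
    endTimeMs: 2500,
    transcriptPhrases: new[] { phrase });
```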
+ public static AudioVisualContent AudioVisualContent(string mimeType = default, string analyzerId = default, string category = default, string path = default, string markdown = default, IDictionary fields = default, long startTimeMs = default, long endTimeMs = default, int? width = default, int? height = default, IEnumerable cameraShotTimesMs = default, IEnumerable keyFrameTimesMs = default, IEnumerable transcriptPhrases = default, IEnumerable segments = default) + { + fields ??= new ChangeTrackingDictionary(); + cameraShotTimesMs ??= new ChangeTrackingList(); + keyFrameTimesMs ??= new ChangeTrackingList(); + transcriptPhrases ??= new ChangeTrackingList(); + segments ??= new ChangeTrackingList(); + + return new AudioVisualContent( + MediaContentKind.AudioVisual, + mimeType, + analyzerId, + category, + path, + markdown, + fields, + additionalBinaryDataProperties: null, + startTimeMs, + endTimeMs, + width, + height, + cameraShotTimesMs.ToList(), + keyFrameTimesMs.ToList(), + transcriptPhrases.ToList(), + segments.ToList()); + } + + /// Transcript phrase. + /// Speaker index or name. + /// Start time of the phrase in milliseconds. + /// End time of the phrase in milliseconds. + /// Detected locale of the phrase. Ex. en-US. + /// Transcript text. + /// Confidence of predicting the phrase. + /// Span of the phrase in the markdown content. + /// List of words in the phrase. + /// A new instance for mocking. + public static TranscriptPhrase TranscriptPhrase(string speaker = default, long startTimeMs = default, long endTimeMs = default, string locale = default, string text = default, float? confidence = default, ContentSpan span = default, IEnumerable words = default) + { + words ??= new ChangeTrackingList(); + + return new TranscriptPhrase( + speaker, + startTimeMs, + endTimeMs, + locale, + text, + confidence, + span, + words.ToList(), + additionalBinaryDataProperties: null); + } + + /// Transcript word. + /// Start time of the word in milliseconds. + /// End time of the word in milliseconds. + /// Transcript text. + /// Span of the word in the markdown content. + /// A new instance for mocking. + public static TranscriptWord TranscriptWord(long startTimeMs = default, long endTimeMs = default, string text = default, ContentSpan span = default) + { + return new TranscriptWord(startTimeMs, endTimeMs, text, span, additionalBinaryDataProperties: null); + } + + /// Detected audio/visual content segment. + /// Segment identifier. + /// Classified content category. + /// Span of the segment in the markdown content. + /// Start time of the segment in milliseconds. + /// End time of the segment in milliseconds. + /// A new instance for mocking. + public static AudioVisualContentSegment AudioVisualContentSegment(string segmentId = default, string category = default, ContentSpan span = default, long startTimeMs = default, long endTimeMs = default) + { + return new AudioVisualContentSegment( + segmentId, + category, + span, + startTimeMs, + endTimeMs, + additionalBinaryDataProperties: null); + } + + /// Analyzer that extracts content and fields from multimodal documents. + /// The unique identifier of the analyzer. + /// A description of the analyzer. + /// Tags associated with the analyzer. + /// The status of the analyzer. + /// The date and time when the analyzer was created. + /// The date and time when the analyzer was last modified. + /// Warnings encountered while creating the analyzer. + /// The analyzer to incrementally train from. + /// Analyzer configuration settings. 
+ /// The schema of fields to extracted. + /// Indicates whether the result may contain additional fields outside of the defined schema. + /// The location where the data may be processed. Defaults to global. + /// Additional knowledge sources used to enhance the analyzer. + /// + /// Mapping of model roles to specific model names. + /// Ex. { "completion": "gpt-4.1", "embedding": "text-embedding-3-large" }. + /// + /// Chat completion and embedding models supported by the analyzer. + /// A new instance for mocking. + public static ContentAnalyzer ContentAnalyzer(string analyzerId = default, string description = default, IDictionary tags = default, ContentAnalyzerStatus status = default, DateTimeOffset createdAt = default, DateTimeOffset lastModifiedAt = default, IEnumerable warnings = default, string baseAnalyzerId = default, ContentAnalyzerConfig config = default, ContentFieldSchema fieldSchema = default, bool? dynamicFieldSchema = default, ProcessingLocation? processingLocation = default, IEnumerable knowledgeSources = default, IDictionary models = default, SupportedModels supportedModels = default) + { + tags ??= new ChangeTrackingDictionary(); + warnings ??= new ChangeTrackingList(); + knowledgeSources ??= new ChangeTrackingList(); + models ??= new ChangeTrackingDictionary(); + + return new ContentAnalyzer( + analyzerId, + description, + tags, + status, + createdAt, + lastModifiedAt, + warnings.ToList(), + baseAnalyzerId, + config, + fieldSchema, + dynamicFieldSchema, + processingLocation, + knowledgeSources.ToList(), + models, + supportedModels, + additionalBinaryDataProperties: null); + } + + /// Configuration settings for an analyzer. + /// Return all content details. + /// List of locale hints for speech transcription. + /// Enable optical character recognition (OCR). + /// Enable layout analysis. + /// Enable generation of figure description. + /// Enable analysis of figures, such as charts and diagrams. + /// Enable mathematical formula detection. + /// Representation format of tables in analyze result markdown. + /// Representation format of charts in analyze result markdown. + /// Representation format of annotations in analyze result markdown. + /// Disable the default blurring of faces for privacy while processing the content. + /// Return field grounding source and confidence. + /// Map of categories to classify the input content(s) against. + /// Enable segmentation of the input by contentCategories. + /// Force segmentation of document content by page. + /// + /// Omit the content for this analyzer from analyze result. + /// Only return content(s) from additional analyzers specified in contentCategories, if any. + /// + /// A new instance for mocking. + public static ContentAnalyzerConfig ContentAnalyzerConfig(bool? returnDetails = default, IEnumerable locales = default, bool? enableOcr = default, bool? enableLayout = default, bool? enableFigureDescription = default, bool? enableFigureAnalysis = default, bool? enableFormula = default, TableFormat? tableFormat = default, ChartFormat? chartFormat = default, AnnotationFormat? annotationFormat = default, bool? disableFaceBlurring = default, bool? estimateFieldSourceAndConfidence = default, IDictionary contentCategories = default, bool? enableSegment = default, bool? segmentPerPage = default, bool? 
omitContent = default) + { + locales ??= new ChangeTrackingList(); + contentCategories ??= new ChangeTrackingDictionary(); + + return new ContentAnalyzerConfig( + returnDetails, + locales.ToList(), + enableOcr, + enableLayout, + enableFigureDescription, + enableFigureAnalysis, + enableFormula, + tableFormat, + chartFormat, + annotationFormat, + disableFaceBlurring, + estimateFieldSourceAndConfidence, + contentCategories, + enableSegment, + segmentPerPage, + omitContent, + additionalBinaryDataProperties: null); + } + + /// Content category definition. + /// The description of the category. + /// Optional analyzer used to process the content. + /// Optional inline definition of analyzer used to process the content. + /// A new instance for mocking. + public static ContentCategory ContentCategory(string description = default, string analyzerId = default, ContentAnalyzer analyzer = default) + { + return new ContentCategory(description, analyzerId, analyzer, additionalBinaryDataProperties: null); + } + + /// Schema of fields to be extracted from documents. + /// The name of the field schema. + /// A description of the field schema. + /// The fields defined in the schema. + /// Additional definitions referenced by the fields in the schema. + /// A new instance for mocking. + public static ContentFieldSchema ContentFieldSchema(string name = default, string description = default, IDictionary fields = default, IDictionary definitions = default) + { + fields ??= new ChangeTrackingDictionary(); + definitions ??= new ChangeTrackingDictionary(); + + return new ContentFieldSchema(name, description, fields, definitions, additionalBinaryDataProperties: null); + } + + /// Definition of the field using a JSON Schema like syntax. + /// Generation method. + /// Semantic data type of the field value. + /// Field description. + /// Field type schema of each array element, if type is array. + /// Named sub-fields, if type is object. + /// Examples of field values. + /// Enumeration of possible field values. + /// Descriptions for each enumeration value. + /// Reference to another field definition. + /// Return grounding source and confidence. + /// A new instance for mocking. + public static ContentFieldDefinition ContentFieldDefinition(GenerationMethod? @method = default, ContentFieldType? @type = default, string description = default, ContentFieldDefinition itemDefinition = default, IDictionary properties = default, IEnumerable examples = default, IEnumerable @enum = default, IDictionary enumDescriptions = default, string @ref = default, bool? estimateSourceAndConfidence = default) + { + properties ??= new ChangeTrackingDictionary(); + examples ??= new ChangeTrackingList(); + @enum ??= new ChangeTrackingList(); + enumDescriptions ??= new ChangeTrackingDictionary(); + + return new ContentFieldDefinition( + @method, + @type, + description, + itemDefinition, + properties, + examples.ToList(), + @enum.ToList(), + enumDescriptions, + @ref, + estimateSourceAndConfidence, + additionalBinaryDataProperties: null); + } + + /// + /// Knowledge source. + /// Please note this is the abstract base class. The derived classes available for instantiation are: . + /// + /// The kind of knowledge source. + /// A new instance for mocking. + public static KnowledgeSource KnowledgeSource(string kind = default) + { + return new UnknownKnowledgeSource(new KnowledgeSourceKind(kind), additionalBinaryDataProperties: null); + } + + /// Labeled data knowledge source. + /// The URL of the blob container containing labeled data. 
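The schema factories above mirror a JSON-Schema-like field definition. A sketch of mocking a one-field schema follows; it assumes the same factory class name, `using System.Collections.Generic;`, and that the `fields` dictionary is keyed by field name with `ContentFieldDefinition` values (the element types are stripped in this rendering of the diff):

```C#
// Assumption: fields is IDictionary<string, ContentFieldDefinition> keyed by field name.
ContentFieldDefinition vendorName = ContentUnderstandingModelFactory.ContentFieldDefinition(
    type: ContentFieldType.String,
    description: "Name of the vendor that issued the invoice.");

ContentFieldSchema schema = ContentUnderstandingModelFactory.ContentFieldSchema(
    name: "InvoiceFields",
    description: "Fields extracted from invoices.",
    fields: new Dictionary<string, ContentFieldDefinition> { ["VendorName"] = vendorName });
```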
+ /// An optional prefix to filter blobs within the container. + /// An optional path to a file listing specific blobs to include. + /// A new instance for mocking. + public static LabeledDataKnowledgeSource LabeledDataKnowledgeSource(Uri containerUrl = default, string prefix = default, string fileListPath = default) + { + return new LabeledDataKnowledgeSource(KnowledgeSourceKind.LabeledData, additionalBinaryDataProperties: null, containerUrl, prefix, fileListPath); + } + + /// Chat completion and embedding models supported by the analyzer. + /// Chat completion models supported by the analyzer. + /// Embedding models supported by the analyzer. + /// A new instance for mocking. + public static SupportedModels SupportedModels(IEnumerable completion = default, IEnumerable embedding = default) + { + completion ??= new ChangeTrackingList(); + embedding ??= new ChangeTrackingList(); + + return new SupportedModels(completion.ToList(), embedding.ToList(), additionalBinaryDataProperties: null); + } + + /// default settings for this Content Understanding resource. + /// + /// Mapping of model names to deployments. + /// Ex. { "gpt-4.1": "myGpt41Deployment", "text-embedding-3-large": "myTextEmbedding3LargeDeployment" }. + /// + /// A new instance for mocking. + public static ContentUnderstandingDefaults ContentUnderstandingDefaults(IDictionary modelDeployments = default) + { + modelDeployments ??= new ChangeTrackingDictionary(); + + return new ContentUnderstandingDefaults(modelDeployments, additionalBinaryDataProperties: null); + } + + /// Copy authorization details for cross-resource copy. + /// Full path of the source analyzer. + /// Azure resource ID of the target location to copy to. + /// Date/time when the copy authorization expires. + /// A new instance for mocking. + public static CopyAuthorization CopyAuthorization(string source = default, string targetAzureResourceId = default, DateTimeOffset expiresAt = default) + { + return new CopyAuthorization(source, targetAzureResourceId, expiresAt, additionalBinaryDataProperties: null); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/Argument.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/Argument.cs new file mode 100644 index 000000000000..4111e44e9c0d --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/Argument.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + internal static partial class Argument + { + /// The value. + /// The name. + public static void AssertNotNull(T value, string name) + { + if (value is null) + { + throw new ArgumentNullException(name); + } + } + + /// The value. + /// The name. + public static void AssertNotNull(T? value, string name) + where T : struct + { + if (!value.HasValue) + { + throw new ArgumentNullException(name); + } + } + + /// The value. + /// The name. 
+ public static void AssertNotNullOrEmpty(IEnumerable value, string name) + { + if (value is null) + { + throw new ArgumentNullException(name); + } + if (value is ICollection collectionOfT && collectionOfT.Count == 0) + { + throw new ArgumentException("Value cannot be an empty collection.", name); + } + if (value is ICollection collection && collection.Count == 0) + { + throw new ArgumentException("Value cannot be an empty collection.", name); + } + using IEnumerator e = value.GetEnumerator(); + if (!e.MoveNext()) + { + throw new ArgumentException("Value cannot be an empty collection.", name); + } + } + + /// The value. + /// The name. + public static void AssertNotNullOrEmpty(string value, string name) + { + if (value is null) + { + throw new ArgumentNullException(name); + } + if (value.Length == 0) + { + throw new ArgumentException("Value cannot be an empty string.", name); + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CancellationTokenExtensions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CancellationTokenExtensions.cs new file mode 100644 index 000000000000..fd5a0d066253 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CancellationTokenExtensions.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Threading; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + internal static partial class CancellationTokenExtensions + { + public static RequestContext ToRequestContext(this CancellationToken cancellationToken) => cancellationToken.CanBeCanceled ? new RequestContext { CancellationToken = cancellationToken } : null; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ChangeTrackingDictionary.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ChangeTrackingDictionary.cs new file mode 100644 index 000000000000..50d361e142f6 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ChangeTrackingDictionary.cs @@ -0,0 +1,189 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class ChangeTrackingDictionary : IDictionary, IReadOnlyDictionary + where TKey : notnull + { + private IDictionary _innerDictionary; + + public ChangeTrackingDictionary() + { + } + + /// The inner dictionary. + public ChangeTrackingDictionary(IDictionary dictionary) + { + if (dictionary == null) + { + return; + } + _innerDictionary = new Dictionary(dictionary); + } + + /// The inner dictionary. + public ChangeTrackingDictionary(IReadOnlyDictionary dictionary) + { + if (dictionary == null) + { + return; + } + _innerDictionary = new Dictionary(); + foreach (var pair in dictionary) + { + _innerDictionary.Add(pair); + } + } + + /// Gets the IsUndefined. + public bool IsUndefined => _innerDictionary == null; + + /// Gets the Count. + public int Count => IsUndefined ? 0 : EnsureDictionary().Count; + + /// Gets the IsReadOnly. + public bool IsReadOnly => IsUndefined ? false : EnsureDictionary().IsReadOnly; + + /// Gets the Keys. + public ICollection Keys => IsUndefined ? Array.Empty() : EnsureDictionary().Keys; + + /// Gets the Values. 
+ public ICollection Values => IsUndefined ? Array.Empty() : EnsureDictionary().Values; + + /// Gets or sets the value associated with the specified key. + public TValue this[TKey key] + { + get + { + if (IsUndefined) + { + throw new KeyNotFoundException(nameof(key)); + } + return EnsureDictionary()[key]; + } + set + { + EnsureDictionary()[key] = value; + } + } + + /// Gets the Keys. + IEnumerable IReadOnlyDictionary.Keys => Keys; + + /// Gets the Values. + IEnumerable IReadOnlyDictionary.Values => Values; + + public IEnumerator> GetEnumerator() + { + if (IsUndefined) + { + IEnumerator> enumerateEmpty() + { + yield break; + } + return enumerateEmpty(); + } + return EnsureDictionary().GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + /// The item to add. + public void Add(KeyValuePair item) + { + EnsureDictionary().Add(item); + } + + public void Clear() + { + EnsureDictionary().Clear(); + } + + /// The item to search for. + public bool Contains(KeyValuePair item) + { + if (IsUndefined) + { + return false; + } + return EnsureDictionary().Contains(item); + } + + /// The array to copy. + /// The index. + public void CopyTo(KeyValuePair[] array, int index) + { + if (IsUndefined) + { + return; + } + EnsureDictionary().CopyTo(array, index); + } + + /// The item to remove. + public bool Remove(KeyValuePair item) + { + if (IsUndefined) + { + return false; + } + return EnsureDictionary().Remove(item); + } + + /// The key. + /// The value to add. + public void Add(TKey key, TValue value) + { + EnsureDictionary().Add(key, value); + } + + /// The key to search for. + public bool ContainsKey(TKey key) + { + if (IsUndefined) + { + return false; + } + return EnsureDictionary().ContainsKey(key); + } + + /// The key. + public bool Remove(TKey key) + { + if (IsUndefined) + { + return false; + } + return EnsureDictionary().Remove(key); + } + + /// The key to search for. + /// The value. + public bool TryGetValue(TKey key, out TValue value) + { + if (IsUndefined) + { + value = default; + return false; + } + return EnsureDictionary().TryGetValue(key, out value); + } + + public IDictionary EnsureDictionary() + { + return _innerDictionary ??= new Dictionary(); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ChangeTrackingList.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ChangeTrackingList.cs new file mode 100644 index 000000000000..d138bb822aa7 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ChangeTrackingList.cs @@ -0,0 +1,168 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class ChangeTrackingList : IList, IReadOnlyList + { + private IList _innerList; + + public ChangeTrackingList() + { + } + + /// The inner list. + public ChangeTrackingList(IList innerList) + { + if (innerList != null) + { + _innerList = innerList; + } + } + + /// The inner list. + public ChangeTrackingList(IReadOnlyList innerList) + { + if (innerList != null) + { + _innerList = innerList.ToList(); + } + } + + /// Gets the IsUndefined. + public bool IsUndefined => _innerList == null; + + /// Gets the Count. + public int Count => IsUndefined ? 0 : EnsureList().Count; + + /// Gets the IsReadOnly. 
+ public bool IsReadOnly => IsUndefined ? false : EnsureList().IsReadOnly; + + /// Gets or sets the value associated with the specified key. + public T this[int index] + { + get + { + if (IsUndefined) + { + throw new ArgumentOutOfRangeException(nameof(index)); + } + return EnsureList()[index]; + } + set + { + if (IsUndefined) + { + throw new ArgumentOutOfRangeException(nameof(index)); + } + EnsureList()[index] = value; + } + } + + public void Reset() + { + _innerList = null; + } + + public IEnumerator GetEnumerator() + { + if (IsUndefined) + { + IEnumerator enumerateEmpty() + { + yield break; + } + return enumerateEmpty(); + } + return EnsureList().GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + /// The item to add. + public void Add(T item) + { + EnsureList().Add(item); + } + + public void Clear() + { + EnsureList().Clear(); + } + + /// The item. + public bool Contains(T item) + { + if (IsUndefined) + { + return false; + } + return EnsureList().Contains(item); + } + + /// The array to copy to. + /// The array index. + public void CopyTo(T[] array, int arrayIndex) + { + if (IsUndefined) + { + return; + } + EnsureList().CopyTo(array, arrayIndex); + } + + /// The item. + public bool Remove(T item) + { + if (IsUndefined) + { + return false; + } + return EnsureList().Remove(item); + } + + /// The item. + public int IndexOf(T item) + { + if (IsUndefined) + { + return -1; + } + return EnsureList().IndexOf(item); + } + + /// The inner list. + /// The item. + public void Insert(int index, T item) + { + EnsureList().Insert(index, item); + } + + /// The inner list. + public void RemoveAt(int index) + { + if (IsUndefined) + { + throw new ArgumentOutOfRangeException(nameof(index)); + } + EnsureList().RemoveAt(index); + } + + public IList EnsureList() + { + return _innerList ??= new List(); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ClientPipelineExtensions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ClientPipelineExtensions.cs new file mode 100644 index 000000000000..fd266729d88b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ClientPipelineExtensions.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.ContentUnderstanding +{ + internal static partial class ClientPipelineExtensions + { + public static async ValueTask ProcessMessageAsync(this HttpPipeline pipeline, HttpMessage message, RequestContext context) + { + (CancellationToken userCancellationToken, ErrorOptions statusOption) = context.Parse(); + await pipeline.SendAsync(message, userCancellationToken).ConfigureAwait(false); + + if (message.Response.IsError && (context?.ErrorOptions & ErrorOptions.NoThrow) != ErrorOptions.NoThrow) + { + throw new RequestFailedException(message.Response); + } + + return message.Response; + } + + public static Response ProcessMessage(this HttpPipeline pipeline, HttpMessage message, RequestContext context) + { + (CancellationToken userCancellationToken, ErrorOptions statusOption) = context.Parse(); + pipeline.Send(message, userCancellationToken); + + if (message.Response.IsError && (context?.ErrorOptions & ErrorOptions.NoThrow) != ErrorOptions.NoThrow) + { + throw new RequestFailedException(message.Response); + } + + return message.Response; + } + + public static async ValueTask> ProcessHeadAsBoolMessageAsync(this HttpPipeline pipeline, HttpMessage message, RequestContext context) + { + Response response = await pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + switch (response.Status) + { + case >= 200 and < 300: + return Response.FromValue(true, response); + case >= 400 and < 500: + return Response.FromValue(false, response); + default: + return new ErrorResult(response, new RequestFailedException(response)); + } + } + + public static Response ProcessHeadAsBoolMessage(this HttpPipeline pipeline, HttpMessage message, RequestContext context) + { + Response response = pipeline.ProcessMessage(message, context); + switch (response.Status) + { + case >= 200 and < 300: + return Response.FromValue(true, response); + case >= 400 and < 500: + return Response.FromValue(false, response); + default: + return new ErrorResult(response, new RequestFailedException(response)); + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenMemberAttribute.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenMemberAttribute.cs new file mode 100644 index 000000000000..6084980c10e2 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenMemberAttribute.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.AI.ContentUnderstanding +{ + [AttributeUsage((AttributeTargets.Property | AttributeTargets.Field))] + internal partial class CodeGenMemberAttribute : CodeGenTypeAttribute + { + /// The original name of the member. + public CodeGenMemberAttribute(string originalName) : base(originalName) + { + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenSerializationAttribute.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenSerializationAttribute.cs new file mode 100644 index 000000000000..640c210fa2e3 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenSerializationAttribute.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.AI.ContentUnderstanding +{ + [AttributeUsage((AttributeTargets.Class | AttributeTargets.Struct), AllowMultiple = true, Inherited = true)] + internal partial class CodeGenSerializationAttribute : Attribute + { + /// The property name which these hooks apply to. + public CodeGenSerializationAttribute(string propertyName) + { + PropertyName = propertyName; + } + + /// The property name which these hooks apply to. + /// The serialization name of the property. + public CodeGenSerializationAttribute(string propertyName, string serializationName) + { + PropertyName = propertyName; + SerializationName = serializationName; + } + + /// Gets or sets the property name which these hooks should apply to. + public string PropertyName { get; } + + /// Gets or sets the serialization name of the property. + public string SerializationName { get; set; } + + /// + /// Gets or sets the method name to use when serializing the property value (property name excluded). + /// The signature of the serialization hook method must be or compatible with when invoking: private void SerializeHook(Utf8JsonWriter writer); + /// + public string SerializationValueHook { get; set; } + + /// + /// Gets or sets the method name to use when deserializing the property value from the JSON. + /// private static void DeserializationHook(JsonProperty property, ref TypeOfTheProperty propertyValue); // if the property is required + /// private static void DeserializationHook(JsonProperty property, ref Optional<TypeOfTheProperty> propertyValue); // if the property is optional + /// + public string DeserializationValueHook { get; set; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenSuppressAttribute.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenSuppressAttribute.cs new file mode 100644 index 000000000000..300ab640e44a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenSuppressAttribute.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.AI.ContentUnderstanding +{ + [AttributeUsage((AttributeTargets.Class | AttributeTargets.Enum | AttributeTargets.Struct), AllowMultiple = true)] + internal partial class CodeGenSuppressAttribute : Attribute + { + /// The member to suppress. + /// The types of the parameters of the member. + public CodeGenSuppressAttribute(string member, params Type[] parameters) + { + Member = member; + Parameters = parameters; + } + + /// Gets the Member. + public string Member { get; } + + /// Gets the Parameters. + public Type[] Parameters { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenTypeAttribute.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenTypeAttribute.cs new file mode 100644 index 000000000000..0afc7d0226dd --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/CodeGenTypeAttribute.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; + +namespace Azure.AI.ContentUnderstanding +{ + [AttributeUsage((AttributeTargets.Class | AttributeTargets.Enum | AttributeTargets.Struct))] + internal partial class CodeGenTypeAttribute : Attribute + { + /// The original name of the type. + public CodeGenTypeAttribute(string originalName) + { + OriginalName = originalName; + } + + /// Gets the OriginalName. + public string OriginalName { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ErrorResult.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ErrorResult.cs new file mode 100644 index 000000000000..8f150c76bf48 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ErrorResult.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class ErrorResult : Response + { + private readonly Response _response; + private readonly RequestFailedException _exception; + + public ErrorResult(Response response, RequestFailedException exception) + { + _response = response; + _exception = exception; + } + + /// Gets the Value. + public override T Value => throw _exception; + + /// + public override Response GetRawResponse() + { + return _response; + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ModelSerializationExtensions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ModelSerializationExtensions.cs new file mode 100644 index 000000000000..38303ef25309 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/ModelSerializationExtensions.cs @@ -0,0 +1,258 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + internal static partial class ModelSerializationExtensions + { + internal static readonly ModelReaderWriterOptions WireOptions = new ModelReaderWriterOptions("W"); + internal static readonly JsonDocumentOptions JsonDocumentOptions = new JsonDocumentOptions + { + MaxDepth = 256 + }; + + public static object GetObject(this JsonElement element) + { + switch (element.ValueKind) + { + case JsonValueKind.String: + return element.GetString(); + case JsonValueKind.Number: + if (element.TryGetInt32(out int intValue)) + { + return intValue; + } + if (element.TryGetInt64(out long longValue)) + { + return longValue; + } + return element.GetDouble(); + case JsonValueKind.True: + return true; + case JsonValueKind.False: + return false; + case JsonValueKind.Undefined: + case JsonValueKind.Null: + return null; + case JsonValueKind.Object: + Dictionary dictionary = new Dictionary(); + foreach (var jsonProperty in element.EnumerateObject()) + { + dictionary.Add(jsonProperty.Name, jsonProperty.Value.GetObject()); + } + return dictionary; + case JsonValueKind.Array: + List list = new List(); + foreach (var item in element.EnumerateArray()) + { + list.Add(item.GetObject()); + } + return list.ToArray(); + default: + throw new NotSupportedException($"Not supported value kind {element.ValueKind}"); + } + } + + public static byte[] GetBytesFromBase64(this JsonElement element, string format) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + + return format switch + { + "U" => TypeFormatters.FromBase64UrlString(element.GetRequiredString()), + "D" => element.GetBytesFromBase64(), + _ => throw new ArgumentException($"Format is not supported: '{format}'", nameof(format)) + }; + } + + public static DateTimeOffset GetDateTimeOffset(this JsonElement element, string format) => format switch + { + "U" when element.ValueKind == JsonValueKind.Number => DateTimeOffset.FromUnixTimeSeconds(element.GetInt64()), + _ => TypeFormatters.ParseDateTimeOffset(element.GetString(), format) + }; + + public static TimeSpan GetTimeSpan(this JsonElement element, string format) => TypeFormatters.ParseTimeSpan(element.GetString(), format); + + public static char GetChar(this JsonElement element) + { + if (element.ValueKind == JsonValueKind.String) + { + string text = element.GetString(); + if (text == null || text.Length != 1) + { + throw new NotSupportedException($"Cannot convert \"{text}\" to a char"); + } + return text[0]; + } + else + { + throw new NotSupportedException($"Cannot convert {element.ValueKind} to a char"); + } + } + + [Conditional("DEBUG")] + public static void ThrowNonNullablePropertyIsNull(this JsonProperty @property) + { + throw new JsonException($"A property '{@property.Name}' defined as non-nullable but received as null from the service. 
This exception only happens in DEBUG builds of the library and would be ignored in the release build"); + } + + public static string GetRequiredString(this JsonElement element) + { + string value = element.GetString(); + if (value == null) + { + throw new InvalidOperationException($"The requested operation requires an element of type 'String', but the target element has type '{element.ValueKind}'."); + } + return value; + } + + public static void WriteStringValue(this Utf8JsonWriter writer, DateTimeOffset value, string format) + { + writer.WriteStringValue(TypeFormatters.ToString(value, format)); + } + + public static void WriteStringValue(this Utf8JsonWriter writer, DateTime value, string format) + { + writer.WriteStringValue(TypeFormatters.ToString(value, format)); + } + + public static void WriteStringValue(this Utf8JsonWriter writer, TimeSpan value, string format) + { + writer.WriteStringValue(TypeFormatters.ToString(value, format)); + } + + public static void WriteStringValue(this Utf8JsonWriter writer, char value) + { + writer.WriteStringValue(value.ToString(CultureInfo.InvariantCulture)); + } + + public static void WriteBase64StringValue(this Utf8JsonWriter writer, byte[] value, string format) + { + if (value == null) + { + writer.WriteNullValue(); + return; + } + switch (format) + { + case "U": + writer.WriteStringValue(TypeFormatters.ToBase64UrlString(value)); + break; + case "D": + writer.WriteBase64StringValue(value); + break; + default: + throw new ArgumentException($"Format is not supported: '{format}'", nameof(format)); + } + } + + public static void WriteNumberValue(this Utf8JsonWriter writer, DateTimeOffset value, string format) + { + if (format != "U") + { + throw new ArgumentOutOfRangeException(nameof(format), "Only 'U' format is supported when writing a DateTimeOffset as a Number."); + } + writer.WriteNumberValue(value.ToUnixTimeSeconds()); + } + + public static void WriteObjectValue(this Utf8JsonWriter writer, T value, ModelReaderWriterOptions options = null) + { + switch (value) + { + case null: + writer.WriteNullValue(); + break; + case IJsonModel jsonModel: + jsonModel.Write(writer, options ?? 
WireOptions); + break; + case byte[] bytes: + writer.WriteBase64StringValue(bytes); + break; + case BinaryData bytes0: + writer.WriteBase64StringValue(bytes0); + break; + case JsonElement json: + json.WriteTo(writer); + break; + case int i: + writer.WriteNumberValue(i); + break; + case decimal d: + writer.WriteNumberValue(d); + break; + case double d0: + if (double.IsNaN(d0)) + { + writer.WriteStringValue("NaN"); + } + else + { + writer.WriteNumberValue(d0); + } + break; + case float f: + writer.WriteNumberValue(f); + break; + case long l: + writer.WriteNumberValue(l); + break; + case string s: + writer.WriteStringValue(s); + break; + case bool b: + writer.WriteBooleanValue(b); + break; + case Guid g: + writer.WriteStringValue(g); + break; + case DateTimeOffset dateTimeOffset: + writer.WriteStringValue(dateTimeOffset, "O"); + break; + case DateTime dateTime: + writer.WriteStringValue(dateTime, "O"); + break; + case IEnumerable> enumerable: + writer.WriteStartObject(); + foreach (var pair in enumerable) + { + writer.WritePropertyName(pair.Key); + writer.WriteObjectValue(pair.Value, options); + } + writer.WriteEndObject(); + break; + case IEnumerable objectEnumerable: + writer.WriteStartArray(); + foreach (var item in objectEnumerable) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + break; + case TimeSpan timeSpan: + writer.WriteStringValue(timeSpan, "P"); + break; + default: + throw new NotSupportedException($"Not supported type {value.GetType()}"); + } + } + + public static void WriteObjectValue(this Utf8JsonWriter writer, object value, ModelReaderWriterOptions options = null) + { + writer.WriteObjectValue(value, options); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/Optional.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/Optional.cs new file mode 100644 index 000000000000..3cb88cf335df --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/Optional.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + internal static partial class Optional + { + public static bool IsCollectionDefined(IEnumerable collection) + { + return !(collection is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined); + } + + public static bool IsCollectionDefined(IDictionary collection) + { + return !(collection is ChangeTrackingDictionary changeTrackingDictionary && changeTrackingDictionary.IsUndefined); + } + + public static bool IsCollectionDefined(IReadOnlyDictionary collection) + { + return !(collection is ChangeTrackingDictionary changeTrackingDictionary && changeTrackingDictionary.IsUndefined); + } + + public static bool IsDefined(T? 
value) + where T : struct + { + return value.HasValue; + } + + public static bool IsDefined(object value) + { + return value != null; + } + + public static bool IsDefined(string value) + { + return value != null; + } + + public static bool IsDefined(JsonElement value) + { + return value.ValueKind != JsonValueKind.Undefined; + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/RawRequestUriBuilderExtensions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/RawRequestUriBuilderExtensions.cs new file mode 100644 index 000000000000..9dfa4c1511a2 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/RawRequestUriBuilderExtensions.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Collections.Generic; +using System.Linq; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + internal static partial class RawRequestUriBuilderExtensions + { + public static void AppendQueryDelimited(this RawRequestUriBuilder builder, string name, IEnumerable value, string delimiter, SerializationFormat format = SerializationFormat.Default, bool escape = true) + { + delimiter ??= ","; + IEnumerable stringValues = value.Select(v => TypeFormatters.ConvertToString(v, format)); + builder.AppendQuery(name, string.Join(delimiter, stringValues), escape); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/RequestContextExtensions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/RequestContextExtensions.cs new file mode 100644 index 000000000000..497ea1e7c38b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/RequestContextExtensions.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Threading; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + internal static partial class RequestContextExtensions + { + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + public static ValueTuple Parse(this RequestContext context) + { + if (context == null) + { + return (CancellationToken.None, ErrorOptions.Default); + } + return (context.CancellationToken, context.ErrorOptions); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/SerializationFormat.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/SerializationFormat.cs new file mode 100644 index 000000000000..241dac2b24ac --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/SerializationFormat.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.AI.ContentUnderstanding +{ + internal enum SerializationFormat + { + /// The default serialization format. + Default = 0, + /// The RFC1123 date time format. + DateTime_RFC1123 = 1, + /// The RFC3339 date time format. + DateTime_RFC3339 = 2, + /// The RFC7231 date time format. + DateTime_RFC7231 = 3, + /// The ISO8601 date time format. + DateTime_ISO8601 = 4, + /// The Unix date time format. + DateTime_Unix = 5, + /// The ISO8601 date format. 
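+ /// Serialized with the "D" specifier as yyyy-MM-dd (see TypeFormatters.ToFormatSpecifier).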
+ Date_ISO8601 = 6, + /// The ISO8601 duration format. + Duration_ISO8601 = 7, + /// The constant duration format. + Duration_Constant = 8, + /// The seconds duration format. + Duration_Seconds = 9, + /// The seconds duration format with float precision. + Duration_Seconds_Float = 10, + /// The seconds duration format with double precision. + Duration_Seconds_Double = 11, + /// The milliseconds duration format. + Duration_Milliseconds = 12, + /// The milliseconds duration format with float precision. + Duration_Milliseconds_Float = 13, + /// The milliseconds duration format with double precision. + Duration_Milliseconds_Double = 14, + /// The ISO8601 time format. + Time_ISO8601 = 15, + /// The Base64Url bytes format. + Bytes_Base64Url = 16, + /// The Base64 bytes format. + Bytes_Base64 = 17 + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/TypeFormatters.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/TypeFormatters.cs new file mode 100644 index 000000000000..6759e260870c --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/TypeFormatters.cs @@ -0,0 +1,180 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Globalization; + +namespace Azure.AI.ContentUnderstanding +{ + internal static partial class TypeFormatters + { + private const string RoundtripZFormat = "yyyy-MM-ddTHH:mm:ss.fffffffZ"; + public const string DefaultNumberFormat = "G"; + + public static string ToString(bool value) => value ? "true" : "false"; + + public static string ToString(DateTime value, string format) => value.Kind switch + { + DateTimeKind.Utc => ToString((DateTimeOffset)value, format), + _ => throw new NotSupportedException($"DateTime {value} has a Kind of {value.Kind}. Generated clients require it to be UTC. 
You can call DateTime.SpecifyKind to change Kind property value to DateTimeKind.Utc.") + }; + + public static string ToString(DateTimeOffset value, string format) => format switch + { + "D" => value.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture), + "U" => value.ToUnixTimeSeconds().ToString(CultureInfo.InvariantCulture), + "O" => value.ToUniversalTime().ToString(RoundtripZFormat, CultureInfo.InvariantCulture), + "o" => value.ToUniversalTime().ToString(RoundtripZFormat, CultureInfo.InvariantCulture), + "R" => value.ToString("r", CultureInfo.InvariantCulture), + _ => value.ToString(format, CultureInfo.InvariantCulture) + }; + + public static string ToString(TimeSpan value, string format) => format switch + { + "P" => System.Xml.XmlConvert.ToString(value), + _ => value.ToString(format, CultureInfo.InvariantCulture) + }; + + public static string ToString(byte[] value, string format) => format switch + { + "U" => ToBase64UrlString(value), + "D" => Convert.ToBase64String(value), + _ => throw new ArgumentException($"Format is not supported: '{format}'", nameof(format)) + }; + + public static string ToBase64UrlString(byte[] value) + { + int numWholeOrPartialInputBlocks = checked (value.Length + 2) / 3; + int size = checked (numWholeOrPartialInputBlocks * 4); + char[] output = new char[size]; + + int numBase64Chars = Convert.ToBase64CharArray(value, 0, value.Length, output, 0); + + int i = 0; + for (; i < numBase64Chars; i++) + { + char ch = output[i]; + if (ch == '+') + { + output[i] = '-'; + } + else + { + if (ch == '/') + { + output[i] = '_'; + } + else + { + if (ch == '=') + { + break; + } + } + } + } + + return new string(output, 0, i); + } + + public static byte[] FromBase64UrlString(string value) + { + int paddingCharsToAdd = (value.Length % 4) switch + { + 0 => 0, + 2 => 2, + 3 => 1, + _ => throw new InvalidOperationException("Malformed input") + }; + char[] output = new char[(value.Length + paddingCharsToAdd)]; + int i = 0; + for (; i < value.Length; i++) + { + char ch = value[i]; + if (ch == '-') + { + output[i] = '+'; + } + else + { + if (ch == '_') + { + output[i] = '/'; + } + else + { + output[i] = ch; + } + } + } + + for (; i < output.Length; i++) + { + output[i] = '='; + } + + return Convert.FromBase64CharArray(output, 0, output.Length); + } + + public static DateTimeOffset ParseDateTimeOffset(string value, string format) => format switch + { + "U" => DateTimeOffset.FromUnixTimeSeconds(long.Parse(value, CultureInfo.InvariantCulture)), + _ => DateTimeOffset.Parse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal) + }; + + public static TimeSpan ParseTimeSpan(string value, string format) => format switch + { + "P" => System.Xml.XmlConvert.ToTimeSpan(value), + _ => TimeSpan.ParseExact(value, format, CultureInfo.InvariantCulture) + }; + + public static string ToFormatSpecifier(SerializationFormat format) => format switch + { + SerializationFormat.DateTime_RFC1123 => "R", + SerializationFormat.DateTime_RFC3339 => "O", + SerializationFormat.DateTime_RFC7231 => "R", + SerializationFormat.DateTime_ISO8601 => "O", + SerializationFormat.Date_ISO8601 => "D", + SerializationFormat.DateTime_Unix => "U", + SerializationFormat.Bytes_Base64Url => "U", + SerializationFormat.Bytes_Base64 => "D", + SerializationFormat.Duration_ISO8601 => "P", + SerializationFormat.Duration_Constant => "c", + SerializationFormat.Duration_Seconds => "%s", + SerializationFormat.Duration_Seconds_Float => "s\\.FFF", + SerializationFormat.Duration_Seconds_Double => "s\\.FFFFFF", + 
SerializationFormat.Time_ISO8601 => "T", + _ => null + }; + + public static string ConvertToString(object value, SerializationFormat format = SerializationFormat.Default) + { + string formatSpecifier = ToFormatSpecifier(format); + + return value switch + { + null => "null", + string s => s, + bool b => ToString(b), + int or float or double or long or decimal => ((IFormattable)value).ToString(DefaultNumberFormat, CultureInfo.InvariantCulture), + byte[] b0 when formatSpecifier != null => ToString(b0, formatSpecifier), + IEnumerable s0 => string.Join(",", s0), + DateTimeOffset dateTime when formatSpecifier != null => ToString(dateTime, formatSpecifier), + TimeSpan timeSpan when format == SerializationFormat.Duration_Seconds => Convert.ToInt32(timeSpan.TotalSeconds).ToString(CultureInfo.InvariantCulture), + TimeSpan timeSpan0 when format == SerializationFormat.Duration_Seconds_Float || format == SerializationFormat.Duration_Seconds_Double => timeSpan0.TotalSeconds.ToString(CultureInfo.InvariantCulture), + TimeSpan timeSpan1 when format == SerializationFormat.Duration_Milliseconds => Convert.ToInt32(timeSpan1.TotalMilliseconds).ToString(CultureInfo.InvariantCulture), + TimeSpan timeSpan2 when format == SerializationFormat.Duration_Milliseconds_Float || format == SerializationFormat.Duration_Milliseconds_Double => timeSpan2.TotalMilliseconds.ToString(CultureInfo.InvariantCulture), + TimeSpan timeSpan3 when formatSpecifier != null => ToString(timeSpan3, formatSpecifier), + TimeSpan timeSpan4 => System.Xml.XmlConvert.ToString(timeSpan4), + Guid guid => guid.ToString(), + BinaryData binaryData => ConvertToString(binaryData.ToArray(), format), + _ => value.ToString() + }; + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/Utf8JsonRequestContent.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/Utf8JsonRequestContent.cs new file mode 100644 index 000000000000..e215c1fe96fb --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Internal/Utf8JsonRequestContent.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.IO; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class Utf8JsonRequestContent : RequestContent + { + private readonly MemoryStream _stream; + private readonly RequestContent _content; + + public Utf8JsonRequestContent() + { + _stream = new MemoryStream(); + _content = Create(_stream); + JsonWriter = new Utf8JsonWriter(_stream); + } + + /// Gets the JsonWriter. + public Utf8JsonWriter JsonWriter { get; } + + /// The stream containing the data to be written. + /// The cancellation token to use. + public override async Task WriteToAsync(Stream stream, CancellationToken cancellationToken = default) + { + await JsonWriter.FlushAsync().ConfigureAwait(false); + await _content.WriteToAsync(stream, cancellationToken).ConfigureAwait(false); + } + + /// The stream containing the data to be written. + /// The cancellation token to use. 
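+ /// Flushes pending JSON to the underlying buffer before copying it to the destination stream.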
+ public override void WriteTo(Stream stream, CancellationToken cancellationToken = default) + { + JsonWriter.Flush(); + _content.WriteTo(stream, cancellationToken); + } + + /// + public override bool TryComputeLength(out long length) + { + length = JsonWriter.BytesCommitted + JsonWriter.BytesPending; + return true; + } + + public override void Dispose() + { + JsonWriter.Dispose(); + _content.Dispose(); + _stream.Dispose(); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeInput.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeInput.Serialization.cs new file mode 100644 index 000000000000..c2c42ce1c1ac --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeInput.Serialization.cs @@ -0,0 +1,198 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Additional input to analyze. + public partial class AnalyzeInput : IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AnalyzeInput)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(Url)) + { + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Url.AbsoluteUri); + } + if (Optional.IsDefined(Data)) + { + writer.WritePropertyName("data"u8); + writer.WriteBase64StringValue(Data.ToArray(), "D"); + } + if (Optional.IsDefined(Name)) + { + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + } + if (Optional.IsDefined(MimeType)) + { + writer.WritePropertyName("mimeType"u8); + writer.WriteStringValue(MimeType); + } + if (Optional.IsDefined(InputRange)) + { + writer.WritePropertyName("range"u8); + writer.WriteStringValue(InputRange); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + AnalyzeInput IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual AnalyzeInput JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AnalyzeInput)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAnalyzeInput(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static AnalyzeInput DeserializeAnalyzeInput(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri url = default; + BinaryData data = default; + string name = default; + string mimeType = default; + string inputRange = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("url"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + url = new Uri(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("data"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + data = BinaryData.FromBytes(prop.Value.GetBytesFromBase64("D")); + continue; + } + if (prop.NameEquals("name"u8)) + { + name = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("mimeType"u8)) + { + mimeType = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("range"u8)) + { + inputRange = prop.Value.GetString(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new AnalyzeInput( + url, + data, + name, + mimeType, + inputRange, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(AnalyzeInput)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + AnalyzeInput IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual AnalyzeInput PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeAnalyzeInput(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AnalyzeInput)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
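+ /// Always reports "J" (JSON) as the supported wire format for this model.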
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeInput.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeInput.cs new file mode 100644 index 000000000000..87595366cd4c --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeInput.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Additional input to analyze. + public partial class AnalyzeInput + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + public AnalyzeInput() + { + } + + /// Initializes a new instance of . + /// The URL of the input to analyze. Only one of url or data should be specified. + /// Raw image bytes. Provide bytes-like object; do not base64-encode. Only one of url or data should be specified. + /// Name of the input. + /// The MIME type of the input content. Ex. application/pdf, image/jpeg, etc. + /// Range of the input to analyze (ex. `1-3,5,9-`). Document content uses 1-based page numbers, while audio visual content uses integer milliseconds. + /// Keeps track of any properties unknown to the library. + internal AnalyzeInput(Uri url, BinaryData data, string name, string mimeType, string inputRange, IDictionary additionalBinaryDataProperties) + { + Url = url; + Data = data; + Name = name; + MimeType = mimeType; + InputRange = inputRange; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// The URL of the input to analyze. Only one of url or data should be specified. + public Uri Url { get; set; } + + /// + /// Raw image bytes. Provide bytes-like object; do not base64-encode. Only one of url or data should be specified. + /// + /// To assign a byte[] to this property use . + /// The byte[] will be serialized to a Base64 encoded string. + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromBytes(new byte[] { 1, 2, 3 }). + /// Creates a payload of "AQID". + /// + /// + /// + /// + public BinaryData Data { get; set; } + + /// Name of the input. + public string Name { get; set; } + + /// The MIME type of the input content. Ex. application/pdf, image/jpeg, etc. + public string MimeType { get; set; } + + /// Range of the input to analyze (ex. `1-3,5,9-`). Document content uses 1-based page numbers, while audio visual content uses integer milliseconds. + public string InputRange { get; set; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeRequest1.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeRequest1.Serialization.cs new file mode 100644 index 000000000000..17e6de0ac975 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeRequest1.Serialization.cs @@ -0,0 +1,205 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + /// The AnalyzeRequest1. 
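+ /// Internal request body carrying the analyze inputs and any model deployment overrides.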
+ internal partial class AnalyzeRequest1 : IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AnalyzeRequest1)} does not support writing '{format}' format."); + } + if (Optional.IsCollectionDefined(Inputs)) + { + writer.WritePropertyName("inputs"u8); + writer.WriteStartArray(); + foreach (AnalyzeInput item in Inputs) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(ModelDeployments)) + { + writer.WritePropertyName("modelDeployments"u8); + writer.WriteStartObject(); + foreach (var item in ModelDeployments) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item.Value); + } + writer.WriteEndObject(); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + AnalyzeRequest1 IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual AnalyzeRequest1 JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AnalyzeRequest1)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAnalyzeRequest1(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static AnalyzeRequest1 DeserializeAnalyzeRequest1(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList inputs = default; + IDictionary modelDeployments = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("inputs"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(AnalyzeInput.DeserializeAnalyzeInput(item, options)); + } + inputs = array; + continue; + } + if (prop.NameEquals("modelDeployments"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + if (prop0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(prop0.Name, null); + } + else + { + dictionary.Add(prop0.Name, prop0.Value.GetString()); + } + } + modelDeployments = dictionary; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new AnalyzeRequest1(inputs ?? new ChangeTrackingList(), modelDeployments ?? new ChangeTrackingDictionary(), additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(AnalyzeRequest1)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + AnalyzeRequest1 IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual AnalyzeRequest1 PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeAnalyzeRequest1(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AnalyzeRequest1)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// The to serialize into . 
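+ /// Writes the request as wire-format JSON into request content; a null model yields null content.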
+ public static implicit operator RequestContent(AnalyzeRequest1 analyzeRequest1) + { + if (analyzeRequest1 == null) + { + return null; + } + Utf8JsonRequestContent content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(analyzeRequest1, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeRequest1.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeRequest1.cs new file mode 100644 index 000000000000..62b87f0f5d95 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeRequest1.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// The AnalyzeRequest1. + internal partial class AnalyzeRequest1 + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + internal AnalyzeRequest1() + { + Inputs = new ChangeTrackingList(); + ModelDeployments = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// Inputs to analyze. Currently, only pro mode supports multiple inputs. + /// + /// Override default mapping of model names to deployments. + /// Ex. { "gpt-4.1": "myGpt41Deployment", "text-embedding-3-large": "myTextEmbedding3LargeDeployment" }. + /// + /// Keeps track of any properties unknown to the library. + internal AnalyzeRequest1(IList inputs, IDictionary modelDeployments, IDictionary additionalBinaryDataProperties) + { + Inputs = inputs; + ModelDeployments = modelDeployments; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Inputs to analyze. Currently, only pro mode supports multiple inputs. + public IList Inputs { get; } + + /// + /// Override default mapping of model names to deployments. + /// Ex. { "gpt-4.1": "myGpt41Deployment", "text-embedding-3-large": "myTextEmbedding3LargeDeployment" }. + /// + public IDictionary ModelDeployments { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeResult.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeResult.Serialization.cs new file mode 100644 index 000000000000..81958f86ebfb --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeResult.Serialization.cs @@ -0,0 +1,246 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// Analyze operation result. + public partial class AnalyzeResult : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal AnalyzeResult() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
+ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AnalyzeResult)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(AnalyzerId)) + { + writer.WritePropertyName("analyzerId"u8); + writer.WriteStringValue(AnalyzerId); + } + if (Optional.IsDefined(ApiVersion)) + { + writer.WritePropertyName("apiVersion"u8); + writer.WriteStringValue(ApiVersion); + } + if (Optional.IsDefined(CreatedAt)) + { + writer.WritePropertyName("createdAt"u8); + writer.WriteStringValue(CreatedAt.Value, "O"); + } + if (Optional.IsCollectionDefined(Warnings)) + { + writer.WritePropertyName("warnings"u8); + writer.WriteStartArray(); + foreach (ResponseError item in Warnings) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + ((IJsonModel)item).Write(writer, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(StringEncoding)) + { + writer.WritePropertyName("stringEncoding"u8); + writer.WriteStringValue(StringEncoding); + } + writer.WritePropertyName("contents"u8); + writer.WriteStartArray(); + foreach (MediaContent item in Contents) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + AnalyzeResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual AnalyzeResult JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AnalyzeResult)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAnalyzeResult(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static AnalyzeResult DeserializeAnalyzeResult(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string analyzerId = default; + string apiVersion = default; + DateTimeOffset? 
createdAt = default; + IList warnings = default; + string stringEncoding = default; + IList contents = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("analyzerId"u8)) + { + analyzerId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("apiVersion"u8)) + { + apiVersion = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("createdAt"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + createdAt = prop.Value.GetDateTimeOffset("O"); + continue; + } + if (prop.NameEquals("warnings"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(ModelReaderWriter.Read(new BinaryData(Encoding.UTF8.GetBytes(item.GetRawText())), options, AzureAIContentUnderstandingContext.Default)); + } + } + warnings = array; + continue; + } + if (prop.NameEquals("stringEncoding"u8)) + { + stringEncoding = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("contents"u8)) + { + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(MediaContent.DeserializeMediaContent(item, options)); + } + contents = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new AnalyzeResult( + analyzerId, + apiVersion, + createdAt, + warnings ?? new ChangeTrackingList(), + stringEncoding, + contents, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(AnalyzeResult)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + AnalyzeResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual AnalyzeResult PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeAnalyzeResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AnalyzeResult)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeResult.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeResult.cs new file mode 100644 index 000000000000..d3ab3639ca4b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnalyzeResult.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// Analyze operation result. + public partial class AnalyzeResult + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// The extracted content. + internal AnalyzeResult(IEnumerable contents) + { + Warnings = new ChangeTrackingList(); + Contents = contents.ToList(); + } + + /// Initializes a new instance of . + /// The unique identifier of the analyzer. + /// The version of the API used to analyze the document. + /// The date and time when the result was created. + /// Warnings encountered while analyzing the document. + /// + /// The string encoding format for content spans in the response. + /// Possible values are 'codePoint', 'utf16', and `utf8`. Default is `codePoint`.") + /// + /// The extracted content. + /// Keeps track of any properties unknown to the library. + internal AnalyzeResult(string analyzerId, string apiVersion, DateTimeOffset? createdAt, IList warnings, string stringEncoding, IList contents, IDictionary additionalBinaryDataProperties) + { + AnalyzerId = analyzerId; + ApiVersion = apiVersion; + CreatedAt = createdAt; + Warnings = warnings; + StringEncoding = stringEncoding; + Contents = contents; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// The unique identifier of the analyzer. + public string AnalyzerId { get; } + + /// The version of the API used to analyze the document. + public string ApiVersion { get; } + + /// The date and time when the result was created. + public DateTimeOffset? CreatedAt { get; } + + /// Warnings encountered while analyzing the document. + public IList Warnings { get; } + + /// + /// The string encoding format for content spans in the response. + /// Possible values are 'codePoint', 'utf16', and `utf8`. Default is `codePoint`.") + /// + public string StringEncoding { get; } + + /// The extracted content. + public IList Contents { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnnotationFormat.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnnotationFormat.cs new file mode 100644 index 000000000000..c1ba37e84117 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AnnotationFormat.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Representation format of annotations in analyze result markdown. + public readonly partial struct AnnotationFormat : IEquatable + { + private readonly string _value; + /// Do not represent annotations. 
+ private const string NoneValue = "none"; + /// Represent basic annotation information using markdown formatting. + private const string MarkdownValue = "markdown"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public AnnotationFormat(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Do not represent annotations. + public static AnnotationFormat None { get; } = new AnnotationFormat(NoneValue); + + /// Represent basic annotation information using markdown formatting. + public static AnnotationFormat Markdown { get; } = new AnnotationFormat(MarkdownValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(AnnotationFormat left, AnnotationFormat right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(AnnotationFormat left, AnnotationFormat right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator AnnotationFormat(string value) => new AnnotationFormat(value); + + /// Converts a string to a . + /// The value. + public static implicit operator AnnotationFormat?(string value) => value == null ? null : new AnnotationFormat(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is AnnotationFormat other && Equals(other); + + /// + public bool Equals(AnnotationFormat other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ArrayField.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ArrayField.Serialization.cs new file mode 100644 index 000000000000..d94bb798a90e --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ArrayField.Serialization.cs @@ -0,0 +1,192 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Array field extracted from the content. + public partial class ArrayField : ContentField, IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ArrayField)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(FieldType.ToString()); + if (Optional.IsCollectionDefined(ValueArray)) + { + writer.WritePropertyName("valueArray"u8); + writer.WriteStartArray(); + foreach (ContentField item in ValueArray) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + ArrayField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (ArrayField)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override ContentField JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ArrayField)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeArrayField(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static ArrayField DeserializeArrayField(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContentFieldType @type = default; + IList spans = default; + float? confidence = default; + string source = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + ContentFieldType fieldType = default; + IList valueArray = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("type"u8)) + { + @type = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("type"u8)) + { + fieldType = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("valueArray"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DeserializeContentField(item, options)); + } + valueArray = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new ArrayField( + @type, + spans ?? new ChangeTrackingList(), + confidence, + source, + additionalBinaryDataProperties, + fieldType, + valueArray ?? new ChangeTrackingList()); + } + + /// The client options for reading and writing models. 
+ BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ArrayField)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ArrayField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (ArrayField)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override ContentField PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeArrayField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ArrayField)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ArrayField.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ArrayField.cs new file mode 100644 index 000000000000..77815cfc3fd5 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ArrayField.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Array field extracted from the content. + public partial class ArrayField : ContentField + { + /// Initializes a new instance of . + internal ArrayField() : base(ContentFieldType.Array) + { + ValueArray = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Keeps track of any properties unknown to the library. + /// Semantic data type of the field value. + /// Array field value. + internal ArrayField(ContentFieldType @type, IList spans, float? confidence, string source, IDictionary additionalBinaryDataProperties, ContentFieldType fieldType, IList valueArray) : base(@type, spans, confidence, source, additionalBinaryDataProperties) + { + FieldType = fieldType; + ValueArray = valueArray; + } + + /// Semantic data type of the field value. + internal ContentFieldType FieldType { get; set; } = "array"; + + /// Array field value. 
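+ /// Deserialized from the "valueArray" JSON property; each element is a nested ContentField.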
+ public IList ValueArray { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContent.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContent.Serialization.cs new file mode 100644 index 000000000000..757b3093dcba --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContent.Serialization.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Audio visual content. Ex. audio/wav, video/mp4. + public partial class AudioVisualContent : MediaContent, IJsonModel + { + /// Initializes a new instance of for deserialization. + internal AudioVisualContent() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON reader. + /// The client options for reading and writing models. + AudioVisualContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (AudioVisualContent)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override MediaContent JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AudioVisualContent)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAudioVisualContent(document.RootElement, options); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(AudioVisualContent)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + AudioVisualContent IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (AudioVisualContent)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override MediaContent PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeAudioVisualContent(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AudioVisualContent)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContent.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContent.cs new file mode 100644 index 000000000000..2907c61511c1 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContent.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Audio visual content. Ex. audio/wav, video/mp4. + public partial class AudioVisualContent : MediaContent + { + /// Initializes a new instance of . + /// Detected MIME type of the content. Ex. application/pdf, image/jpeg, etc. + /// Start time of the content in milliseconds. + /// End time of the content in milliseconds. + internal AudioVisualContent(string mimeType, long startTimeMs, long endTimeMs) : base(MediaContentKind.AudioVisual, mimeType) + { + StartTimeMs = startTimeMs; + EndTimeMs = endTimeMs; + CameraShotTimesMs = new ChangeTrackingList(); + KeyFrameTimesMs = new ChangeTrackingList(); + TranscriptPhrases = new ChangeTrackingList(); + Segments = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Content kind. + /// Detected MIME type of the content. Ex. application/pdf, image/jpeg, etc. + /// The analyzer that generated this content. + /// Classified content category. + /// The path of the content in the input. + /// Markdown representation of the content. + /// Extracted fields from the content. + /// Keeps track of any properties unknown to the library. + /// Start time of the content in milliseconds. + /// End time of the content in milliseconds. + /// Width of each video frame in pixels, if applicable. + /// Height of each video frame in pixels, if applicable. + /// List of camera shot changes in the video, represented by its timestamp in milliseconds. Only if returnDetails is true. + /// List of key frames in the video, represented by its timestamp in milliseconds. Only if returnDetails is true. + /// List of transcript phrases. Only if returnDetails is true. + /// List of detected content segments. Only if enableSegment is true. + internal AudioVisualContent(MediaContentKind kind, string mimeType, string analyzerId, string category, string path, string markdown, IDictionary fields, IDictionary additionalBinaryDataProperties, long startTimeMs, long endTimeMs, int? width, int? 
height, IList cameraShotTimesMs, IList keyFrameTimesMs, IList transcriptPhrases, IList segments) : base(kind, mimeType, analyzerId, category, path, markdown, fields, additionalBinaryDataProperties) + { + StartTimeMs = startTimeMs; + EndTimeMs = endTimeMs; + Width = width; + Height = height; + CameraShotTimesMs = cameraShotTimesMs; + KeyFrameTimesMs = keyFrameTimesMs; + TranscriptPhrases = transcriptPhrases; + Segments = segments; + } + + /// Start time of the content in milliseconds. + public long StartTimeMs { get; } + + /// End time of the content in milliseconds. + public long EndTimeMs { get; } + + /// Width of each video frame in pixels, if applicable. + public int? Width { get; } + + /// Height of each video frame in pixels, if applicable. + public int? Height { get; } + + /// List of camera shot changes in the video, represented by its timestamp in milliseconds. Only if returnDetails is true. + public IList CameraShotTimesMs { get; } + + /// List of key frames in the video, represented by its timestamp in milliseconds. Only if returnDetails is true. + public IList KeyFrameTimesMs { get; } + + /// List of transcript phrases. Only if returnDetails is true. + public IList TranscriptPhrases { get; } + + /// List of detected content segments. Only if enableSegment is true. + public IList Segments { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContentSegment.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContentSegment.Serialization.cs new file mode 100644 index 000000000000..7691ce983753 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContentSegment.Serialization.cs @@ -0,0 +1,180 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Detected audio/visual content segment. + public partial class AudioVisualContentSegment : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal AudioVisualContentSegment() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AudioVisualContentSegment)} does not support writing '{format}' format."); + } + writer.WritePropertyName("segmentId"u8); + writer.WriteStringValue(SegmentId); + writer.WritePropertyName("category"u8); + writer.WriteStringValue(Category); + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + writer.WritePropertyName("startTimeMs"u8); + writer.WriteNumberValue(StartTimeMs); + writer.WritePropertyName("endTimeMs"u8); + writer.WriteNumberValue(EndTimeMs); + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + AudioVisualContentSegment IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual AudioVisualContentSegment JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AudioVisualContentSegment)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAudioVisualContentSegment(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static AudioVisualContentSegment DeserializeAudioVisualContentSegment(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string segmentId = default; + string category = default; + ContentSpan span = default; + long startTimeMs = default; + long endTimeMs = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("segmentId"u8)) + { + segmentId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("category"u8)) + { + category = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("startTimeMs"u8)) + { + startTimeMs = prop.Value.GetInt64(); + continue; + } + if (prop.NameEquals("endTimeMs"u8)) + { + endTimeMs = prop.Value.GetInt64(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new AudioVisualContentSegment( + segmentId, + category, + span, + startTimeMs, + endTimeMs, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. 
+ protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(AudioVisualContentSegment)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + AudioVisualContentSegment IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual AudioVisualContentSegment PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeAudioVisualContentSegment(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AudioVisualContentSegment)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContentSegment.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContentSegment.cs new file mode 100644 index 000000000000..6718cf924278 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AudioVisualContentSegment.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Detected audio/visual content segment. + public partial class AudioVisualContentSegment + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Segment identifier. + /// Classified content category. + /// Span of the segment in the markdown content. + /// Start time of the segment in milliseconds. + /// End time of the segment in milliseconds. + internal AudioVisualContentSegment(string segmentId, string category, ContentSpan span, long startTimeMs, long endTimeMs) + { + SegmentId = segmentId; + Category = category; + Span = span; + StartTimeMs = startTimeMs; + EndTimeMs = endTimeMs; + } + + /// Initializes a new instance of . + /// Segment identifier. + /// Classified content category. + /// Span of the segment in the markdown content. + /// Start time of the segment in milliseconds. + /// End time of the segment in milliseconds. + /// Keeps track of any properties unknown to the library. 
+ internal AudioVisualContentSegment(string segmentId, string category, ContentSpan span, long startTimeMs, long endTimeMs, IDictionary additionalBinaryDataProperties) + { + SegmentId = segmentId; + Category = category; + Span = span; + StartTimeMs = startTimeMs; + EndTimeMs = endTimeMs; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Segment identifier. + public string SegmentId { get; } + + /// Classified content category. + public string Category { get; } + + /// Span of the segment in the markdown content. + public ContentSpan Span { get; } + + /// Start time of the segment in milliseconds. + public long StartTimeMs { get; } + + /// End time of the segment in milliseconds. + public long EndTimeMs { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AzureAIContentUnderstandingContext.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AzureAIContentUnderstandingContext.cs new file mode 100644 index 000000000000..53b089a224e3 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/AzureAIContentUnderstandingContext.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.ClientModel.Primitives; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Context class which will be filled in by the System.ClientModel.SourceGeneration. + /// For more information + /// + [ModelReaderWriterBuildable(typeof(AnalyzeInput))] + [ModelReaderWriterBuildable(typeof(AnalyzeRequest1))] + [ModelReaderWriterBuildable(typeof(AnalyzeResult))] + [ModelReaderWriterBuildable(typeof(ArrayField))] + [ModelReaderWriterBuildable(typeof(AudioVisualContent))] + [ModelReaderWriterBuildable(typeof(AudioVisualContentSegment))] + [ModelReaderWriterBuildable(typeof(BooleanField))] + [ModelReaderWriterBuildable(typeof(ContentAnalyzer))] + [ModelReaderWriterBuildable(typeof(ContentAnalyzerAnalyzeOperationStatus))] + [ModelReaderWriterBuildable(typeof(ContentAnalyzerConfig))] + [ModelReaderWriterBuildable(typeof(ContentAnalyzerOperationStatus))] + [ModelReaderWriterBuildable(typeof(ContentCategory))] + [ModelReaderWriterBuildable(typeof(ContentField))] + [ModelReaderWriterBuildable(typeof(ContentFieldDefinition))] + [ModelReaderWriterBuildable(typeof(ContentFieldSchema))] + [ModelReaderWriterBuildable(typeof(ContentSpan))] + [ModelReaderWriterBuildable(typeof(ContentUnderstandingDefaults))] + [ModelReaderWriterBuildable(typeof(CopyAnalyzerRequest))] + [ModelReaderWriterBuildable(typeof(CopyAuthorization))] + [ModelReaderWriterBuildable(typeof(DateField))] + [ModelReaderWriterBuildable(typeof(DocumentAnnotation))] + [ModelReaderWriterBuildable(typeof(DocumentAnnotationComment))] + [ModelReaderWriterBuildable(typeof(DocumentBarcode))] + [ModelReaderWriterBuildable(typeof(DocumentCaption))] + [ModelReaderWriterBuildable(typeof(DocumentChartFigure))] + [ModelReaderWriterBuildable(typeof(DocumentContent))] + [ModelReaderWriterBuildable(typeof(DocumentContentSegment))] + [ModelReaderWriterBuildable(typeof(DocumentFigure))] + [ModelReaderWriterBuildable(typeof(DocumentFootnote))] + [ModelReaderWriterBuildable(typeof(DocumentFormula))] + [ModelReaderWriterBuildable(typeof(DocumentHyperlink))] + [ModelReaderWriterBuildable(typeof(DocumentLine))] + [ModelReaderWriterBuildable(typeof(DocumentMermaidFigure))] + [ModelReaderWriterBuildable(typeof(DocumentPage))] + 
[ModelReaderWriterBuildable(typeof(DocumentParagraph))] + [ModelReaderWriterBuildable(typeof(DocumentSection))] + [ModelReaderWriterBuildable(typeof(DocumentTable))] + [ModelReaderWriterBuildable(typeof(DocumentTableCell))] + [ModelReaderWriterBuildable(typeof(DocumentWord))] + [ModelReaderWriterBuildable(typeof(GrantCopyAuthorizationRequest1))] + [ModelReaderWriterBuildable(typeof(IntegerField))] + [ModelReaderWriterBuildable(typeof(JsonField))] + [ModelReaderWriterBuildable(typeof(KnowledgeSource))] + [ModelReaderWriterBuildable(typeof(LabeledDataKnowledgeSource))] + [ModelReaderWriterBuildable(typeof(MediaContent))] + [ModelReaderWriterBuildable(typeof(NumberField))] + [ModelReaderWriterBuildable(typeof(ObjectField))] + [ModelReaderWriterBuildable(typeof(PagedContentAnalyzer))] + [ModelReaderWriterBuildable(typeof(ResponseError))] + [ModelReaderWriterBuildable(typeof(StringField))] + [ModelReaderWriterBuildable(typeof(SupportedModels))] + [ModelReaderWriterBuildable(typeof(TimeField))] + [ModelReaderWriterBuildable(typeof(TranscriptPhrase))] + [ModelReaderWriterBuildable(typeof(TranscriptWord))] + [ModelReaderWriterBuildable(typeof(UnknownContentField))] + [ModelReaderWriterBuildable(typeof(UnknownDocumentFigure))] + [ModelReaderWriterBuildable(typeof(UnknownKnowledgeSource))] + [ModelReaderWriterBuildable(typeof(UnknownMediaContent))] + [ModelReaderWriterBuildable(typeof(UsageDetails))] + public partial class AzureAIContentUnderstandingContext : ModelReaderWriterContext + { + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/BooleanField.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/BooleanField.Serialization.cs new file mode 100644 index 000000000000..108976e9afbb --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/BooleanField.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Boolean field extracted from the content. + public partial class BooleanField : ContentField, IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BooleanField)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(FieldType.ToString()); + if (Optional.IsDefined(ValueBoolean)) + { + writer.WritePropertyName("valueBoolean"u8); + writer.WriteBooleanValue(ValueBoolean.Value); + } + } + + /// The JSON reader. + /// The client options for reading and writing models. 
+ BooleanField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (BooleanField)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override ContentField JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BooleanField)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeBooleanField(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static BooleanField DeserializeBooleanField(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContentFieldType @type = default; + IList spans = default; + float? confidence = default; + string source = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + ContentFieldType fieldType = default; + bool? valueBoolean = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("type"u8)) + { + @type = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("type"u8)) + { + fieldType = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("valueBoolean"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + valueBoolean = prop.Value.GetBoolean(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new BooleanField( + @type, + spans ?? new ChangeTrackingList(), + confidence, + source, + additionalBinaryDataProperties, + fieldType, + valueBoolean); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(BooleanField)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. 
+ BooleanField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (BooleanField)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override ContentField PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeBooleanField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(BooleanField)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/BooleanField.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/BooleanField.cs new file mode 100644 index 000000000000..69e01181d522 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/BooleanField.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Boolean field extracted from the content. + public partial class BooleanField : ContentField + { + /// Initializes a new instance of . + internal BooleanField() : base(ContentFieldType.Boolean) + { + } + + /// Initializes a new instance of . + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Keeps track of any properties unknown to the library. + /// Semantic data type of the field value. + /// Boolean field value. + internal BooleanField(ContentFieldType @type, IList spans, float? confidence, string source, IDictionary additionalBinaryDataProperties, ContentFieldType fieldType, bool? valueBoolean) : base(@type, spans, confidence, source, additionalBinaryDataProperties) + { + FieldType = fieldType; + ValueBoolean = valueBoolean; + } + + /// Semantic data type of the field value. + internal ContentFieldType FieldType { get; set; } = "boolean"; + + /// Boolean field value. + public bool? ValueBoolean { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ChartFormat.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ChartFormat.cs new file mode 100644 index 000000000000..1b70154238d3 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ChartFormat.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Representation format of charts in analyze result markdown. + public readonly partial struct ChartFormat : IEquatable + { + private readonly string _value; + /// Represent charts as Chart.js code blocks. 
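+        // Editor's note (illustrative sketch): ChartFormat follows the extensible-enum pattern used across
+        // this library, so known values are exposed as static properties while unrecognized service values
+        // still round-trip as strings through the implicit conversion defined below. For example:
+        //
+        //   ChartFormat known = ChartFormat.Markdown;        // predefined value
+        //   ChartFormat fromString = "chartJs";              // implicit conversion from string
+        //   bool same = fromString == ChartFormat.ChartJs;   // equality is case-insensitive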
+ private const string ChartJsValue = "chartJs"; + /// Represent charts as markdown tables. + private const string MarkdownValue = "markdown"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public ChartFormat(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Represent charts as Chart.js code blocks. + public static ChartFormat ChartJs { get; } = new ChartFormat(ChartJsValue); + + /// Represent charts as markdown tables. + public static ChartFormat Markdown { get; } = new ChartFormat(MarkdownValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(ChartFormat left, ChartFormat right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(ChartFormat left, ChartFormat right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator ChartFormat(string value) => new ChartFormat(value); + + /// Converts a string to a . + /// The value. + public static implicit operator ChartFormat?(string value) => value == null ? null : new ChartFormat(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ChartFormat other && Equals(other); + + /// + public bool Equals(ChartFormat other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzer.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzer.Serialization.cs new file mode 100644 index 000000000000..686580693b30 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzer.Serialization.cs @@ -0,0 +1,446 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using Azure; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + /// Analyzer that extracts content and fields from multimodal documents. + public partial class ContentAnalyzer : IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentAnalyzer)} does not support writing '{format}' format."); + } + if (options.Format != "W") + { + writer.WritePropertyName("analyzerId"u8); + writer.WriteStringValue(AnalyzerId); + } + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + if (Optional.IsCollectionDefined(Tags)) + { + writer.WritePropertyName("tags"u8); + writer.WriteStartObject(); + foreach (var item in Tags) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item.Value); + } + writer.WriteEndObject(); + } + if (options.Format != "W") + { + writer.WritePropertyName("status"u8); + writer.WriteStringValue(Status.ToString()); + } + if (options.Format != "W") + { + writer.WritePropertyName("createdAt"u8); + writer.WriteStringValue(CreatedAt, "O"); + } + if (options.Format != "W") + { + writer.WritePropertyName("lastModifiedAt"u8); + writer.WriteStringValue(LastModifiedAt, "O"); + } + if (options.Format != "W" && Optional.IsCollectionDefined(Warnings)) + { + writer.WritePropertyName("warnings"u8); + writer.WriteStartArray(); + foreach (ResponseError item in Warnings) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + ((IJsonModel)item).Write(writer, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(BaseAnalyzerId)) + { + writer.WritePropertyName("baseAnalyzerId"u8); + writer.WriteStringValue(BaseAnalyzerId); + } + if (Optional.IsDefined(Config)) + { + writer.WritePropertyName("config"u8); + writer.WriteObjectValue(Config, options); + } + if (Optional.IsDefined(FieldSchema)) + { + writer.WritePropertyName("fieldSchema"u8); + writer.WriteObjectValue(FieldSchema, options); + } + if (Optional.IsDefined(DynamicFieldSchema)) + { + writer.WritePropertyName("dynamicFieldSchema"u8); + writer.WriteBooleanValue(DynamicFieldSchema.Value); + } + if (Optional.IsDefined(ProcessingLocation)) + { + writer.WritePropertyName("processingLocation"u8); + writer.WriteStringValue(ProcessingLocation.Value.ToString()); + } + if (Optional.IsCollectionDefined(KnowledgeSources)) + { + writer.WritePropertyName("knowledgeSources"u8); + writer.WriteStartArray(); + foreach (KnowledgeSource item in KnowledgeSources) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Models)) + { + writer.WritePropertyName("models"u8); + writer.WriteStartObject(); + foreach (var item in Models) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item.Value); + } + writer.WriteEndObject(); + } + if (options.Format != "W" && Optional.IsDefined(SupportedModels)) + { + writer.WritePropertyName("supportedModels"u8); + writer.WriteObjectValue(SupportedModels, options); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. 
+ ContentAnalyzer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual ContentAnalyzer JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentAnalyzer)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContentAnalyzer(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static ContentAnalyzer DeserializeContentAnalyzer(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string analyzerId = default; + string description = default; + IDictionary tags = default; + ContentAnalyzerStatus status = default; + DateTimeOffset createdAt = default; + DateTimeOffset lastModifiedAt = default; + IReadOnlyList warnings = default; + string baseAnalyzerId = default; + ContentAnalyzerConfig config = default; + ContentFieldSchema fieldSchema = default; + bool? dynamicFieldSchema = default; + ProcessingLocation? processingLocation = default; + IList knowledgeSources = default; + IDictionary models = default; + SupportedModels supportedModels = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("analyzerId"u8)) + { + analyzerId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("description"u8)) + { + description = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("tags"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + if (prop0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(prop0.Name, null); + } + else + { + dictionary.Add(prop0.Name, prop0.Value.GetString()); + } + } + tags = dictionary; + continue; + } + if (prop.NameEquals("status"u8)) + { + status = new ContentAnalyzerStatus(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("createdAt"u8)) + { + createdAt = prop.Value.GetDateTimeOffset("O"); + continue; + } + if (prop.NameEquals("lastModifiedAt"u8)) + { + lastModifiedAt = prop.Value.GetDateTimeOffset("O"); + continue; + } + if (prop.NameEquals("warnings"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(ModelReaderWriter.Read(new BinaryData(Encoding.UTF8.GetBytes(item.GetRawText())), options, AzureAIContentUnderstandingContext.Default)); + } + } + warnings = array; + continue; + } + if (prop.NameEquals("baseAnalyzerId"u8)) + { + baseAnalyzerId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("config"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + config = ContentAnalyzerConfig.DeserializeContentAnalyzerConfig(prop.Value, options); + continue; + } + if (prop.NameEquals("fieldSchema"u8)) + { + if (prop.Value.ValueKind 
== JsonValueKind.Null) + { + continue; + } + fieldSchema = ContentFieldSchema.DeserializeContentFieldSchema(prop.Value, options); + continue; + } + if (prop.NameEquals("dynamicFieldSchema"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + dynamicFieldSchema = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("processingLocation"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + processingLocation = new ProcessingLocation(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("knowledgeSources"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(KnowledgeSource.DeserializeKnowledgeSource(item, options)); + } + knowledgeSources = array; + continue; + } + if (prop.NameEquals("models"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + if (prop0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(prop0.Name, null); + } + else + { + dictionary.Add(prop0.Name, prop0.Value.GetString()); + } + } + models = dictionary; + continue; + } + if (prop.NameEquals("supportedModels"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + supportedModels = SupportedModels.DeserializeSupportedModels(prop.Value, options); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new ContentAnalyzer( + analyzerId, + description, + tags ?? new ChangeTrackingDictionary(), + status, + createdAt, + lastModifiedAt, + warnings ?? new ChangeTrackingList(), + baseAnalyzerId, + config, + fieldSchema, + dynamicFieldSchema, + processingLocation, + knowledgeSources ?? new ChangeTrackingList(), + models ?? new ChangeTrackingDictionary(), + supportedModels, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ContentAnalyzer)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ContentAnalyzer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual ContentAnalyzer PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeContentAnalyzer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContentAnalyzer)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// The to serialize into . + public static implicit operator RequestContent(ContentAnalyzer contentAnalyzer) + { + if (contentAnalyzer == null) + { + return null; + } + Utf8JsonRequestContent content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(contentAnalyzer, ModelSerializationExtensions.WireOptions); + return content; + } + + /// The to deserialize the from. + public static explicit operator ContentAnalyzer(Response response) + { + using JsonDocument document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeContentAnalyzer(document.RootElement, ModelSerializationExtensions.WireOptions); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzer.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzer.cs new file mode 100644 index 000000000000..b234fc347400 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzer.cs @@ -0,0 +1,117 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// Analyzer that extracts content and fields from multimodal documents. + public partial class ContentAnalyzer + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + public ContentAnalyzer() + { + Tags = new ChangeTrackingDictionary(); + Warnings = new ChangeTrackingList(); + KnowledgeSources = new ChangeTrackingList(); + Models = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// The unique identifier of the analyzer. + /// A description of the analyzer. + /// Tags associated with the analyzer. + /// The status of the analyzer. + /// The date and time when the analyzer was created. + /// The date and time when the analyzer was last modified. + /// Warnings encountered while creating the analyzer. + /// The analyzer to incrementally train from. + /// Analyzer configuration settings. + /// The schema of fields to extracted. + /// Indicates whether the result may contain additional fields outside of the defined schema. + /// The location where the data may be processed. Defaults to global. + /// Additional knowledge sources used to enhance the analyzer. + /// + /// Mapping of model roles to specific model names. + /// Ex. { "completion": "gpt-4.1", "embedding": "text-embedding-3-large" }. + /// + /// Chat completion and embedding models supported by the analyzer. + /// Keeps track of any properties unknown to the library. 
+ internal ContentAnalyzer(string analyzerId, string description, IDictionary tags, ContentAnalyzerStatus status, DateTimeOffset createdAt, DateTimeOffset lastModifiedAt, IReadOnlyList warnings, string baseAnalyzerId, ContentAnalyzerConfig config, ContentFieldSchema fieldSchema, bool? dynamicFieldSchema, ProcessingLocation? processingLocation, IList knowledgeSources, IDictionary models, SupportedModels supportedModels, IDictionary additionalBinaryDataProperties) + { + AnalyzerId = analyzerId; + Description = description; + Tags = tags; + Status = status; + CreatedAt = createdAt; + LastModifiedAt = lastModifiedAt; + Warnings = warnings; + BaseAnalyzerId = baseAnalyzerId; + Config = config; + FieldSchema = fieldSchema; + DynamicFieldSchema = dynamicFieldSchema; + ProcessingLocation = processingLocation; + KnowledgeSources = knowledgeSources; + Models = models; + SupportedModels = supportedModels; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// The unique identifier of the analyzer. + public string AnalyzerId { get; } + + /// A description of the analyzer. + public string Description { get; set; } + + /// Tags associated with the analyzer. + public IDictionary Tags { get; } + + /// The status of the analyzer. + public ContentAnalyzerStatus Status { get; } + + /// The date and time when the analyzer was created. + public DateTimeOffset CreatedAt { get; } + + /// The date and time when the analyzer was last modified. + public DateTimeOffset LastModifiedAt { get; } + + /// Warnings encountered while creating the analyzer. + public IReadOnlyList Warnings { get; } + + /// The analyzer to incrementally train from. + public string BaseAnalyzerId { get; set; } + + /// Analyzer configuration settings. + public ContentAnalyzerConfig Config { get; set; } + + /// The schema of fields to extracted. + public ContentFieldSchema FieldSchema { get; set; } + + /// Indicates whether the result may contain additional fields outside of the defined schema. + public bool? DynamicFieldSchema { get; set; } + + /// The location where the data may be processed. Defaults to global. + public ProcessingLocation? ProcessingLocation { get; set; } + + /// Additional knowledge sources used to enhance the analyzer. + public IList KnowledgeSources { get; } + + /// + /// Mapping of model roles to specific model names. + /// Ex. { "completion": "gpt-4.1", "embedding": "text-embedding-3-large" }. + /// + public IDictionary Models { get; } + + /// Chat completion and embedding models supported by the analyzer. + public SupportedModels SupportedModels { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerAnalyzeOperationStatus.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerAnalyzeOperationStatus.Serialization.cs new file mode 100644 index 000000000000..937f2d4a2c8a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerAnalyzeOperationStatus.Serialization.cs @@ -0,0 +1,210 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// Provides status details for analyze operations. 
+ internal partial class ContentAnalyzerAnalyzeOperationStatus : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal ContentAnalyzerAnalyzeOperationStatus() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentAnalyzerAnalyzeOperationStatus)} does not support writing '{format}' format."); + } + writer.WritePropertyName("id"u8); + writer.WriteStringValue(Id); + writer.WritePropertyName("status"u8); + writer.WriteStringValue(Status.ToString()); + if (Optional.IsDefined(Error)) + { + writer.WritePropertyName("error"u8); + ((IJsonModel)Error).Write(writer, options); + } + if (Optional.IsDefined(Result)) + { + writer.WritePropertyName("result"u8); + writer.WriteObjectValue(Result, options); + } + if (Optional.IsDefined(Usage)) + { + writer.WritePropertyName("usage"u8); + writer.WriteObjectValue(Usage, options); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + ContentAnalyzerAnalyzeOperationStatus IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual ContentAnalyzerAnalyzeOperationStatus JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentAnalyzerAnalyzeOperationStatus)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContentAnalyzerAnalyzeOperationStatus(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static ContentAnalyzerAnalyzeOperationStatus DeserializeContentAnalyzerAnalyzeOperationStatus(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string id = default; + OperationState status = default; + ResponseError error = default; + AnalyzeResult result = default; + UsageDetails usage = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("id"u8)) + { + id = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("status"u8)) + { + status = new OperationState(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("error"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + error = ModelReaderWriter.Read(new BinaryData(Encoding.UTF8.GetBytes(prop.Value.GetRawText())), options, AzureAIContentUnderstandingContext.Default); + continue; + } + if (prop.NameEquals("result"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + result = AnalyzeResult.DeserializeAnalyzeResult(prop.Value, options); + continue; + } + if (prop.NameEquals("usage"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + usage = UsageDetails.DeserializeUsageDetails(prop.Value, options); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new ContentAnalyzerAnalyzeOperationStatus( + id, + status, + error, + result, + usage, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ContentAnalyzerAnalyzeOperationStatus)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ContentAnalyzerAnalyzeOperationStatus IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual ContentAnalyzerAnalyzeOperationStatus PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeContentAnalyzerAnalyzeOperationStatus(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContentAnalyzerAnalyzeOperationStatus)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// The to deserialize the from. + public static explicit operator ContentAnalyzerAnalyzeOperationStatus(Response response) + { + using JsonDocument document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeContentAnalyzerAnalyzeOperationStatus(document.RootElement, ModelSerializationExtensions.WireOptions); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerAnalyzeOperationStatus.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerAnalyzeOperationStatus.cs new file mode 100644 index 000000000000..03f3e5d28629 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerAnalyzeOperationStatus.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// Provides status details for analyze operations. + internal partial class ContentAnalyzerAnalyzeOperationStatus + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// The unique ID of the operation. + /// The status of the operation. + internal ContentAnalyzerAnalyzeOperationStatus(string id, OperationState status) + { + Id = id; + Status = status; + } + + /// Initializes a new instance of . + /// The unique ID of the operation. + /// The status of the operation. + /// Error object that describes the error when status is "Failed". + /// The result of the operation. + /// Usage details of the analyze operation. + /// Keeps track of any properties unknown to the library. + internal ContentAnalyzerAnalyzeOperationStatus(string id, OperationState status, ResponseError error, AnalyzeResult result, UsageDetails usage, IDictionary additionalBinaryDataProperties) + { + Id = id; + Status = status; + Error = error; + Result = result; + Usage = usage; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// The unique ID of the operation. + public string Id { get; } + + /// The status of the operation. + public OperationState Status { get; } + + /// Error object that describes the error when status is "Failed". + public ResponseError Error { get; } + + /// The result of the operation. + public AnalyzeResult Result { get; } + + /// Usage details of the analyze operation. + public UsageDetails Usage { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerConfig.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerConfig.Serialization.cs new file mode 100644 index 000000000000..d4bc81a60d4e --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerConfig.Serialization.cs @@ -0,0 +1,419 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Configuration settings for an analyzer. 
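+    // Editor's note (illustrative sketch): ContentAnalyzerConfig carries the optional per-analyzer switches
+    // that JsonModelWriteCore below writes only when they are set. A minimal wiring example, assuming a
+    // public parameterless constructor and public setters on these optional properties (neither is shown
+    // in this diff):
+    //
+    //   var analyzer = new ContentAnalyzer
+    //   {
+    //       Description = "Invoice analyzer",
+    //       Config = new ContentAnalyzerConfig
+    //       {
+    //           ReturnDetails = true,
+    //           EnableOcr = true,
+    //           ChartFormat = ChartFormat.Markdown
+    //       }
+    //   };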
+ public partial class ContentAnalyzerConfig : IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentAnalyzerConfig)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(ReturnDetails)) + { + writer.WritePropertyName("returnDetails"u8); + writer.WriteBooleanValue(ReturnDetails.Value); + } + if (Optional.IsCollectionDefined(Locales)) + { + writer.WritePropertyName("locales"u8); + writer.WriteStartArray(); + foreach (string item in Locales) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(EnableOcr)) + { + writer.WritePropertyName("enableOcr"u8); + writer.WriteBooleanValue(EnableOcr.Value); + } + if (Optional.IsDefined(EnableLayout)) + { + writer.WritePropertyName("enableLayout"u8); + writer.WriteBooleanValue(EnableLayout.Value); + } + if (Optional.IsDefined(EnableFigureDescription)) + { + writer.WritePropertyName("enableFigureDescription"u8); + writer.WriteBooleanValue(EnableFigureDescription.Value); + } + if (Optional.IsDefined(EnableFigureAnalysis)) + { + writer.WritePropertyName("enableFigureAnalysis"u8); + writer.WriteBooleanValue(EnableFigureAnalysis.Value); + } + if (Optional.IsDefined(EnableFormula)) + { + writer.WritePropertyName("enableFormula"u8); + writer.WriteBooleanValue(EnableFormula.Value); + } + if (Optional.IsDefined(TableFormat)) + { + writer.WritePropertyName("tableFormat"u8); + writer.WriteStringValue(TableFormat.Value.ToString()); + } + if (Optional.IsDefined(ChartFormat)) + { + writer.WritePropertyName("chartFormat"u8); + writer.WriteStringValue(ChartFormat.Value.ToString()); + } + if (Optional.IsDefined(AnnotationFormat)) + { + writer.WritePropertyName("annotationFormat"u8); + writer.WriteStringValue(AnnotationFormat.Value.ToString()); + } + if (Optional.IsDefined(DisableFaceBlurring)) + { + writer.WritePropertyName("disableFaceBlurring"u8); + writer.WriteBooleanValue(DisableFaceBlurring.Value); + } + if (Optional.IsDefined(EstimateFieldSourceAndConfidence)) + { + writer.WritePropertyName("estimateFieldSourceAndConfidence"u8); + writer.WriteBooleanValue(EstimateFieldSourceAndConfidence.Value); + } + if (Optional.IsCollectionDefined(ContentCategories)) + { + writer.WritePropertyName("contentCategories"u8); + writer.WriteStartObject(); + foreach (var item in ContentCategories) + { + writer.WritePropertyName(item.Key); + writer.WriteObjectValue(item.Value, options); + } + writer.WriteEndObject(); + } + if (Optional.IsDefined(EnableSegment)) + { + writer.WritePropertyName("enableSegment"u8); + writer.WriteBooleanValue(EnableSegment.Value); + } + if (Optional.IsDefined(SegmentPerPage)) + { + writer.WritePropertyName("segmentPerPage"u8); + writer.WriteBooleanValue(SegmentPerPage.Value); + } + if (Optional.IsDefined(OmitContent)) + { + writer.WritePropertyName("omitContent"u8); + writer.WriteBooleanValue(OmitContent.Value); + } + if 
(options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + ContentAnalyzerConfig IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual ContentAnalyzerConfig JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentAnalyzerConfig)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContentAnalyzerConfig(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static ContentAnalyzerConfig DeserializeContentAnalyzerConfig(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool? returnDetails = default; + IList locales = default; + bool? enableOcr = default; + bool? enableLayout = default; + bool? enableFigureDescription = default; + bool? enableFigureAnalysis = default; + bool? enableFormula = default; + TableFormat? tableFormat = default; + ChartFormat? chartFormat = default; + AnnotationFormat? annotationFormat = default; + bool? disableFaceBlurring = default; + bool? estimateFieldSourceAndConfidence = default; + IDictionary contentCategories = default; + bool? enableSegment = default; + bool? segmentPerPage = default; + bool? 
omitContent = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("returnDetails"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + returnDetails = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("locales"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + locales = array; + continue; + } + if (prop.NameEquals("enableOcr"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableOcr = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("enableLayout"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableLayout = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("enableFigureDescription"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableFigureDescription = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("enableFigureAnalysis"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableFigureAnalysis = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("enableFormula"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableFormula = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("tableFormat"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + tableFormat = new TableFormat(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("chartFormat"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + chartFormat = new ChartFormat(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("annotationFormat"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + annotationFormat = new AnnotationFormat(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("disableFaceBlurring"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + disableFaceBlurring = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("estimateFieldSourceAndConfidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + estimateFieldSourceAndConfidence = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("contentCategories"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + dictionary.Add(prop0.Name, ContentCategory.DeserializeContentCategory(prop0.Value, options)); + } + contentCategories = dictionary; + continue; + } + if (prop.NameEquals("enableSegment"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableSegment = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("segmentPerPage"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + segmentPerPage = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("omitContent"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + omitContent = prop.Value.GetBoolean(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, 
BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new ContentAnalyzerConfig( + returnDetails, + locales ?? new ChangeTrackingList(), + enableOcr, + enableLayout, + enableFigureDescription, + enableFigureAnalysis, + enableFormula, + tableFormat, + chartFormat, + annotationFormat, + disableFaceBlurring, + estimateFieldSourceAndConfidence, + contentCategories ?? new ChangeTrackingDictionary(), + enableSegment, + segmentPerPage, + omitContent, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ContentAnalyzerConfig)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ContentAnalyzerConfig IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual ContentAnalyzerConfig PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeContentAnalyzerConfig(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContentAnalyzerConfig)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerConfig.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerConfig.cs new file mode 100644 index 000000000000..c22c1322f9de --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerConfig.cs @@ -0,0 +1,119 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Configuration settings for an analyzer. + public partial class ContentAnalyzerConfig + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + public ContentAnalyzerConfig() + { + Locales = new ChangeTrackingList(); + ContentCategories = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// Return all content details. + /// List of locale hints for speech transcription. + /// Enable optical character recognition (OCR). + /// Enable layout analysis. 
+ /// Enable generation of figure description. + /// Enable analysis of figures, such as charts and diagrams. + /// Enable mathematical formula detection. + /// Representation format of tables in analyze result markdown. + /// Representation format of charts in analyze result markdown. + /// Representation format of annotations in analyze result markdown. + /// Disable the default blurring of faces for privacy while processing the content. + /// Return field grounding source and confidence. + /// Map of categories to classify the input content(s) against. + /// Enable segmentation of the input by contentCategories. + /// Force segmentation of document content by page. + /// + /// Omit the content for this analyzer from analyze result. + /// Only return content(s) from additional analyzers specified in contentCategories, if any. + /// + /// Keeps track of any properties unknown to the library. + internal ContentAnalyzerConfig(bool? returnDetails, IList locales, bool? enableOcr, bool? enableLayout, bool? enableFigureDescription, bool? enableFigureAnalysis, bool? enableFormula, TableFormat? tableFormat, ChartFormat? chartFormat, AnnotationFormat? annotationFormat, bool? disableFaceBlurring, bool? estimateFieldSourceAndConfidence, IDictionary contentCategories, bool? enableSegment, bool? segmentPerPage, bool? omitContent, IDictionary additionalBinaryDataProperties) + { + ReturnDetails = returnDetails; + Locales = locales; + EnableOcr = enableOcr; + EnableLayout = enableLayout; + EnableFigureDescription = enableFigureDescription; + EnableFigureAnalysis = enableFigureAnalysis; + EnableFormula = enableFormula; + TableFormat = tableFormat; + ChartFormat = chartFormat; + AnnotationFormat = annotationFormat; + DisableFaceBlurring = disableFaceBlurring; + EstimateFieldSourceAndConfidence = estimateFieldSourceAndConfidence; + ContentCategories = contentCategories; + EnableSegment = enableSegment; + SegmentPerPage = segmentPerPage; + OmitContent = omitContent; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Return all content details. + public bool? ReturnDetails { get; set; } + + /// List of locale hints for speech transcription. + public IList Locales { get; } + + /// Enable optical character recognition (OCR). + public bool? EnableOcr { get; set; } + + /// Enable layout analysis. + public bool? EnableLayout { get; set; } + + /// Enable generation of figure description. + public bool? EnableFigureDescription { get; set; } + + /// Enable analysis of figures, such as charts and diagrams. + public bool? EnableFigureAnalysis { get; set; } + + /// Enable mathematical formula detection. + public bool? EnableFormula { get; set; } + + /// Representation format of tables in analyze result markdown. + public TableFormat? TableFormat { get; set; } + + /// Representation format of charts in analyze result markdown. + public ChartFormat? ChartFormat { get; set; } + + /// Representation format of annotations in analyze result markdown. + public AnnotationFormat? AnnotationFormat { get; set; } + + /// Disable the default blurring of faces for privacy while processing the content. + public bool? DisableFaceBlurring { get; set; } + + /// Return field grounding source and confidence. + public bool? EstimateFieldSourceAndConfidence { get; set; } + + /// Map of categories to classify the input content(s) against. + public IDictionary ContentCategories { get; } + + /// Enable segmentation of the input by contentCategories. + public bool? 
EnableSegment { get; set; } + + /// Force segmentation of document content by page. + public bool? SegmentPerPage { get; set; } + + /// + /// Omit the content for this analyzer from analyze result. + /// Only return content(s) from additional analyzers specified in contentCategories, if any. + /// + public bool? OmitContent { get; set; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerOperationStatus.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerOperationStatus.Serialization.cs new file mode 100644 index 000000000000..9334243cd908 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerOperationStatus.Serialization.cs @@ -0,0 +1,210 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// Provides status details for analyzer creation operations. + internal partial class ContentAnalyzerOperationStatus : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal ContentAnalyzerOperationStatus() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentAnalyzerOperationStatus)} does not support writing '{format}' format."); + } + writer.WritePropertyName("id"u8); + writer.WriteStringValue(Id); + writer.WritePropertyName("status"u8); + writer.WriteStringValue(Status.ToString()); + if (Optional.IsDefined(Error)) + { + writer.WritePropertyName("error"u8); + ((IJsonModel)Error).Write(writer, options); + } + if (Optional.IsDefined(Result)) + { + writer.WritePropertyName("result"u8); + writer.WriteObjectValue(Result, options); + } + if (Optional.IsDefined(Usage)) + { + writer.WritePropertyName("usage"u8); + writer.WriteObjectValue(Usage, options); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + ContentAnalyzerOperationStatus IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual ContentAnalyzerOperationStatus JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
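The generated `ContentAnalyzerConfig` shown above is a plain options bag: nullable properties left unset are skipped by `JsonModelWriteCore`, and wire properties the library does not recognize are captured in `_additionalBinaryDataProperties` and re-emitted on write. A minimal sketch of how a caller might populate and round-trip the model with `ModelReaderWriter`; the option values and the sample JSON payload are illustrative only, not taken from the service.

```C#
using System;
using System.ClientModel.Primitives;
using Azure.AI.ContentUnderstanding;

// Populate only the options you need; properties left null are omitted from the JSON payload.
var config = new ContentAnalyzerConfig
{
    ReturnDetails = true,
    EnableOcr = true,
    EnableLayout = true,
    EstimateFieldSourceAndConfidence = true,
};
config.Locales.Add("en-US"); // locale hints for speech transcription

// Serialize to the JSON wire format ("J" is this model's default persistable format).
BinaryData json = ModelReaderWriter.Write(config);
Console.WriteLine(json.ToString()); // {"returnDetails":true,"locales":["en-US"],"enableOcr":true,...}

// On read, properties unknown to the library ("someFutureSetting" here) are preserved
// and written back out, as the additional-binary-data handling above implements.
ContentAnalyzerConfig roundTripped = ModelReaderWriter.Read<ContentAnalyzerConfig>(
    BinaryData.FromString("{\"enableOcr\":true,\"someFutureSetting\":42}"));
Console.WriteLine(ModelReaderWriter.Write(roundTripped).ToString());
```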
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentAnalyzerOperationStatus)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContentAnalyzerOperationStatus(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static ContentAnalyzerOperationStatus DeserializeContentAnalyzerOperationStatus(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string id = default; + OperationState status = default; + ResponseError error = default; + ContentAnalyzer result = default; + UsageDetails usage = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("id"u8)) + { + id = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("status"u8)) + { + status = new OperationState(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("error"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + error = ModelReaderWriter.Read(new BinaryData(Encoding.UTF8.GetBytes(prop.Value.GetRawText())), options, AzureAIContentUnderstandingContext.Default); + continue; + } + if (prop.NameEquals("result"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + result = ContentAnalyzer.DeserializeContentAnalyzer(prop.Value, options); + continue; + } + if (prop.NameEquals("usage"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + usage = UsageDetails.DeserializeUsageDetails(prop.Value, options); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new ContentAnalyzerOperationStatus( + id, + status, + error, + result, + usage, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ContentAnalyzerOperationStatus)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ContentAnalyzerOperationStatus IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual ContentAnalyzerOperationStatus PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeContentAnalyzerOperationStatus(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContentAnalyzerOperationStatus)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// The to deserialize the from. + public static explicit operator ContentAnalyzerOperationStatus(Response response) + { + using JsonDocument document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeContentAnalyzerOperationStatus(document.RootElement, ModelSerializationExtensions.WireOptions); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerOperationStatus.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerOperationStatus.cs new file mode 100644 index 000000000000..f552d8a132ca --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerOperationStatus.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// Provides status details for analyzer creation operations. + internal partial class ContentAnalyzerOperationStatus + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// The unique ID of the operation. + /// The status of the operation. + internal ContentAnalyzerOperationStatus(string id, OperationState status) + { + Id = id; + Status = status; + } + + /// Initializes a new instance of . + /// The unique ID of the operation. + /// The status of the operation. + /// Error object that describes the error when status is "Failed". + /// The result of the operation. + /// Usage details of the analyzer creation operation. + /// Keeps track of any properties unknown to the library. + internal ContentAnalyzerOperationStatus(string id, OperationState status, ResponseError error, ContentAnalyzer result, UsageDetails usage, IDictionary additionalBinaryDataProperties) + { + Id = id; + Status = status; + Error = error; + Result = result; + Usage = usage; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// The unique ID of the operation. + public string Id { get; } + + /// The status of the operation. + public OperationState Status { get; } + + /// Error object that describes the error when status is "Failed". + public ResponseError Error { get; } + + /// The result of the operation. + public ContentAnalyzer Result { get; } + + /// Usage details of the analyzer creation operation. 
+ public UsageDetails Usage { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerStatus.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerStatus.cs new file mode 100644 index 000000000000..5cffb6d65032 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentAnalyzerStatus.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Status of a resource. + public readonly partial struct ContentAnalyzerStatus : IEquatable + { + private readonly string _value; + /// The resource is being created. + private const string CreatingValue = "creating"; + /// The resource is ready. + private const string ReadyValue = "ready"; + /// The resource is being deleted. + private const string DeletingValue = "deleting"; + /// The resource failed during creation. + private const string FailedValue = "failed"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public ContentAnalyzerStatus(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// The resource is being created. + public static ContentAnalyzerStatus Creating { get; } = new ContentAnalyzerStatus(CreatingValue); + + /// The resource is ready. + public static ContentAnalyzerStatus Ready { get; } = new ContentAnalyzerStatus(ReadyValue); + + /// The resource is being deleted. + public static ContentAnalyzerStatus Deleting { get; } = new ContentAnalyzerStatus(DeletingValue); + + /// The resource failed during creation. + public static ContentAnalyzerStatus Failed { get; } = new ContentAnalyzerStatus(FailedValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(ContentAnalyzerStatus left, ContentAnalyzerStatus right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(ContentAnalyzerStatus left, ContentAnalyzerStatus right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator ContentAnalyzerStatus(string value) => new ContentAnalyzerStatus(value); + + /// Converts a string to a . + /// The value. + public static implicit operator ContentAnalyzerStatus?(string value) => value == null ? null : new ContentAnalyzerStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ContentAnalyzerStatus other && Equals(other); + + /// + public bool Equals(ContentAnalyzerStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentCategory.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentCategory.Serialization.cs new file mode 100644 index 000000000000..2a74a3977f84 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentCategory.Serialization.cs @@ -0,0 +1,166 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Content category definition. + public partial class ContentCategory : IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentCategory)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + if (Optional.IsDefined(AnalyzerId)) + { + writer.WritePropertyName("analyzerId"u8); + writer.WriteStringValue(AnalyzerId); + } + if (Optional.IsDefined(Analyzer)) + { + writer.WritePropertyName("analyzer"u8); + writer.WriteObjectValue(Analyzer, options); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + ContentCategory IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual ContentCategory JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentCategory)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContentCategory(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
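`ContentAnalyzerStatus` above follows the extensible-enum pattern used throughout this library: a struct wrapping a string with case-insensitive equality and implicit conversion from string. A small sketch of the comparison semantics those members define; the "archiving" value is a made-up example of an unrecognized service value.

```C#
using System;
using Azure.AI.ContentUnderstanding;

// Implicit conversion from string; equality uses InvariantCultureIgnoreCase.
ContentAnalyzerStatus status = "READY";
bool isReady = status == ContentAnalyzerStatus.Ready;        // true
bool isFailed = status.Equals(ContentAnalyzerStatus.Failed); // false

// Values the library does not know about still round-trip as strings rather than throwing.
var future = new ContentAnalyzerStatus("archiving");
Console.WriteLine(future.ToString()); // "archiving"
```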
+ internal static ContentCategory DeserializeContentCategory(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string description = default; + string analyzerId = default; + ContentAnalyzer analyzer = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("description"u8)) + { + description = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("analyzerId"u8)) + { + analyzerId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("analyzer"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + analyzer = ContentAnalyzer.DeserializeContentAnalyzer(prop.Value, options); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new ContentCategory(description, analyzerId, analyzer, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ContentCategory)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ContentCategory IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual ContentCategory PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeContentCategory(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContentCategory)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentCategory.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentCategory.cs new file mode 100644 index 000000000000..f8b91f1bbfb7 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentCategory.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Content category definition. 
+ public partial class ContentCategory + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + public ContentCategory() + { + } + + /// Initializes a new instance of . + /// The description of the category. + /// Optional analyzer used to process the content. + /// Optional inline definition of analyzer used to process the content. + /// Keeps track of any properties unknown to the library. + internal ContentCategory(string description, string analyzerId, ContentAnalyzer analyzer, IDictionary additionalBinaryDataProperties) + { + Description = description; + AnalyzerId = analyzerId; + Analyzer = analyzer; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// The description of the category. + public string Description { get; set; } + + /// Optional analyzer used to process the content. + public string AnalyzerId { get; set; } + + /// Optional inline definition of analyzer used to process the content. + public ContentAnalyzer Analyzer { get; set; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentField.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentField.Serialization.cs new file mode 100644 index 000000000000..1b9ca1cf115a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentField.Serialization.cs @@ -0,0 +1,175 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Field extracted from the content. + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , , , , , , and . + /// + [PersistableModelProxy(typeof(UnknownContentField))] + public abstract partial class ContentField : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal ContentField() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
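`ContentCategory` pairs a description with either an `AnalyzerId` reference or an inline `Analyzer` definition, and `ContentAnalyzerConfig.ContentCategories` maps category names to these entries. A sketch of how categories might be wired up to drive classification and segmentation; the category names and the analyzer id are hypothetical.

```C#
using Azure.AI.ContentUnderstanding;

var config = new ContentAnalyzerConfig
{
    EnableSegment = true, // segment the input by the categories below
};

// Route invoices to an existing analyzer by id; leave "other" as a catch-all category.
config.ContentCategories["invoice"] = new ContentCategory
{
    Description = "Invoices and billing statements",
    AnalyzerId = "my-invoice-analyzer", // hypothetical analyzer id
};
config.ContentCategories["other"] = new ContentCategory
{
    Description = "Anything that is not an invoice",
};
```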
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentField)} does not support writing '{format}' format."); + } + writer.WritePropertyName("type"u8); + writer.WriteStringValue(Type.ToString()); + if (Optional.IsCollectionDefined(Spans)) + { + writer.WritePropertyName("spans"u8); + writer.WriteStartArray(); + foreach (ContentSpan item in Spans) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(Confidence)) + { + writer.WritePropertyName("confidence"u8); + writer.WriteNumberValue(Confidence.Value); + } + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + ContentField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual ContentField JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentField)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContentField(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static ContentField DeserializeContentField(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("type"u8, out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "string": + return StringField.DeserializeStringField(element, options); + case "date": + return DateField.DeserializeDateField(element, options); + case "time": + return TimeField.DeserializeTimeField(element, options); + case "number": + return NumberField.DeserializeNumberField(element, options); + case "integer": + return IntegerField.DeserializeIntegerField(element, options); + case "boolean": + return BooleanField.DeserializeBooleanField(element, options); + case "array": + return ArrayField.DeserializeArrayField(element, options); + case "object": + return ObjectField.DeserializeObjectField(element, options); + case "json": + return JsonField.DeserializeJsonField(element, options); + } + } + return UnknownContentField.DeserializeUnknownContentField(element, options); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ContentField)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ContentField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual ContentField PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeContentField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContentField)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentField.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentField.cs new file mode 100644 index 000000000000..a7471d58eee5 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentField.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Field extracted from the content. + /// Please note this is the abstract base class. The derived classes available for instantiation are: , , , , , , , , and . + /// + public abstract partial class ContentField + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Semantic data type of the field value. + private protected ContentField(ContentFieldType @type) + { + Type = @type; + Spans = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Keeps track of any properties unknown to the library. + internal ContentField(ContentFieldType @type, IList spans, float? confidence, string source, IDictionary additionalBinaryDataProperties) + { + Type = @type; + Spans = spans; + Confidence = confidence; + Source = source; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Semantic data type of the field value. + internal ContentFieldType Type { get; set; } + + /// Span(s) associated with the field value in the markdown content. + public IList Spans { get; } + + /// Confidence of predicting the field value. + public float? 
Confidence { get; } + + /// Encoded source that identifies the position of the field value in the content. + public string Source { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldDefinition.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldDefinition.Serialization.cs new file mode 100644 index 000000000000..37a4fc46982b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldDefinition.Serialization.cs @@ -0,0 +1,360 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Definition of the field using a JSON Schema like syntax. + public partial class ContentFieldDefinition : IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentFieldDefinition)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(Method)) + { + writer.WritePropertyName("method"u8); + writer.WriteStringValue(Method.Value.ToString()); + } + if (Optional.IsDefined(Type)) + { + writer.WritePropertyName("type"u8); + writer.WriteStringValue(Type.Value.ToString()); + } + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + if (Optional.IsDefined(ItemDefinition)) + { + writer.WritePropertyName("items"u8); + writer.WriteObjectValue(ItemDefinition, options); + } + if (Optional.IsCollectionDefined(Properties)) + { + writer.WritePropertyName("properties"u8); + writer.WriteStartObject(); + foreach (var item in Properties) + { + writer.WritePropertyName(item.Key); + writer.WriteObjectValue(item.Value, options); + } + writer.WriteEndObject(); + } + if (Optional.IsCollectionDefined(Examples)) + { + writer.WritePropertyName("examples"u8); + writer.WriteStartArray(); + foreach (string item in Examples) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Enum)) + { + writer.WritePropertyName("enum"u8); + writer.WriteStartArray(); + foreach (string item in Enum) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(EnumDescriptions)) + { + writer.WritePropertyName("enumDescriptions"u8); + writer.WriteStartObject(); + foreach (var item in EnumDescriptions) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item.Value); + } + writer.WriteEndObject(); + } + if (Optional.IsDefined(Ref)) + { + 
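`ContentField` is the abstract base of a small discriminated hierarchy; `DeserializeContentField` dispatches on the `type` wire property to the concrete classes listed above (or to `UnknownContentField` for values it does not recognize). A sketch of inspecting extracted fields using only the members declared on the base class; the `fields` dictionary is a hypothetical input, and the value-bearing members of the derived types are deliberately omitted.

```C#
using System;
using System.Collections.Generic;
using Azure.AI.ContentUnderstanding;

internal static class FieldInspection
{
    // 'fields' would typically come from an analyze result; it is a hypothetical input here.
    public static void PrintFields(IDictionary<string, ContentField> fields)
    {
        foreach (KeyValuePair<string, ContentField> kvp in fields)
        {
            // The runtime type mirrors the "type" discriminator: StringField, DateField, TimeField,
            // NumberField, IntegerField, BooleanField, ArrayField, ObjectField, or JsonField.
            Console.WriteLine($"{kvp.Key}: {kvp.Value.GetType().Name}");

            // Only base-class members are used here; Confidence and Source correspond to the
            // estimateFieldSourceAndConfidence option on the analyzer config.
            if (kvp.Value.Confidence is float confidence)
            {
                Console.WriteLine($"  confidence: {confidence:F2}, source: {kvp.Value.Source}");
            }
        }
    }
}
```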
writer.WritePropertyName("$ref"u8); + writer.WriteStringValue(Ref); + } + if (Optional.IsDefined(EstimateSourceAndConfidence)) + { + writer.WritePropertyName("estimateSourceAndConfidence"u8); + writer.WriteBooleanValue(EstimateSourceAndConfidence.Value); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + ContentFieldDefinition IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual ContentFieldDefinition JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentFieldDefinition)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContentFieldDefinition(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static ContentFieldDefinition DeserializeContentFieldDefinition(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + GenerationMethod? @method = default; + ContentFieldType? @type = default; + string description = default; + ContentFieldDefinition itemDefinition = default; + IDictionary properties = default; + IList examples = default; + IList @enum = default; + IDictionary enumDescriptions = default; + string @ref = default; + bool? 
estimateSourceAndConfidence = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("method"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + @method = new GenerationMethod(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("type"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + @type = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("description"u8)) + { + description = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("items"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + itemDefinition = DeserializeContentFieldDefinition(prop.Value, options); + continue; + } + if (prop.NameEquals("properties"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + dictionary.Add(prop0.Name, DeserializeContentFieldDefinition(prop0.Value, options)); + } + properties = dictionary; + continue; + } + if (prop.NameEquals("examples"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + examples = array; + continue; + } + if (prop.NameEquals("enum"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + @enum = array; + continue; + } + if (prop.NameEquals("enumDescriptions"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + if (prop0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(prop0.Name, null); + } + else + { + dictionary.Add(prop0.Name, prop0.Value.GetString()); + } + } + enumDescriptions = dictionary; + continue; + } + if (prop.NameEquals("$ref"u8)) + { + @ref = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("estimateSourceAndConfidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + estimateSourceAndConfidence = prop.Value.GetBoolean(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new ContentFieldDefinition( + @method, + @type, + description, + itemDefinition, + properties ?? new ChangeTrackingDictionary(), + examples ?? new ChangeTrackingList(), + @enum ?? new ChangeTrackingList(), + enumDescriptions ?? new ChangeTrackingDictionary(), + @ref, + estimateSourceAndConfidence, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ContentFieldDefinition)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ContentFieldDefinition IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual ContentFieldDefinition PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeContentFieldDefinition(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContentFieldDefinition)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldDefinition.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldDefinition.cs new file mode 100644 index 000000000000..adc4c03fea06 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldDefinition.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Definition of the field using a JSON Schema like syntax. + public partial class ContentFieldDefinition + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + public ContentFieldDefinition() + { + Properties = new ChangeTrackingDictionary(); + Examples = new ChangeTrackingList(); + Enum = new ChangeTrackingList(); + EnumDescriptions = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// Generation method. + /// Semantic data type of the field value. + /// Field description. + /// Field type schema of each array element, if type is array. + /// Named sub-fields, if type is object. + /// Examples of field values. + /// Enumeration of possible field values. + /// Descriptions for each enumeration value. + /// Reference to another field definition. + /// Return grounding source and confidence. + /// Keeps track of any properties unknown to the library. + internal ContentFieldDefinition(GenerationMethod? @method, ContentFieldType? @type, string description, ContentFieldDefinition itemDefinition, IDictionary properties, IList examples, IList @enum, IDictionary enumDescriptions, string @ref, bool? 
estimateSourceAndConfidence, IDictionary additionalBinaryDataProperties) + { + Method = @method; + Type = @type; + Description = description; + ItemDefinition = itemDefinition; + Properties = properties; + Examples = examples; + Enum = @enum; + EnumDescriptions = enumDescriptions; + Ref = @ref; + EstimateSourceAndConfidence = estimateSourceAndConfidence; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Generation method. + public GenerationMethod? Method { get; set; } + + /// Semantic data type of the field value. + public ContentFieldType? Type { get; set; } + + /// Field description. + public string Description { get; set; } + + /// Field type schema of each array element, if type is array. + public ContentFieldDefinition ItemDefinition { get; set; } + + /// Named sub-fields, if type is object. + public IDictionary Properties { get; } + + /// Examples of field values. + public IList Examples { get; } + + /// Enumeration of possible field values. + public IList Enum { get; } + + /// Descriptions for each enumeration value. + public IDictionary EnumDescriptions { get; } + + /// Reference to another field definition. + public string Ref { get; set; } + + /// Return grounding source and confidence. + public bool? EstimateSourceAndConfidence { get; set; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldSchema.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldSchema.Serialization.cs new file mode 100644 index 000000000000..fbcc78ab1fe7 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldSchema.Serialization.cs @@ -0,0 +1,201 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Schema of fields to be extracted from documents. + public partial class ContentFieldSchema : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal ContentFieldSchema() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
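`ContentFieldDefinition` mirrors a JSON-Schema-like shape: a type plus optional `items`, `properties`, `enum`, `enumDescriptions`, and `$ref`. A sketch of defining an object field with a nested enumerated sub-field; the field names are illustrative, and `ContentFieldType` is constructed from its wire value ("object", "string") the same way the deserializer above does, assuming its string constructor is public like the other extensible enums in this library.

```C#
using Azure.AI.ContentUnderstanding;

// An "object" field with two string sub-fields, one constrained to an enumeration.
var vendorAddress = new ContentFieldDefinition
{
    Type = new ContentFieldType("object"),
    Description = "Vendor mailing address",
};
vendorAddress.Properties["city"] = new ContentFieldDefinition
{
    Type = new ContentFieldType("string"),
    Description = "City name",
};

var region = new ContentFieldDefinition
{
    Type = new ContentFieldType("string"),
    Description = "Two-letter region code",
};
region.Enum.Add("WA");
region.Enum.Add("OR");
region.EnumDescriptions["WA"] = "Washington";
region.EnumDescriptions["OR"] = "Oregon";
vendorAddress.Properties["region"] = region;
```

Definitions built this way are what the `fields` and `definitions` maps of `ContentFieldSchema` carry, as the serializer that follows shows.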
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentFieldSchema)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(Name)) + { + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + } + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + writer.WritePropertyName("fields"u8); + writer.WriteStartObject(); + foreach (var item in Fields) + { + writer.WritePropertyName(item.Key); + writer.WriteObjectValue(item.Value, options); + } + writer.WriteEndObject(); + if (Optional.IsCollectionDefined(Definitions)) + { + writer.WritePropertyName("definitions"u8); + writer.WriteStartObject(); + foreach (var item in Definitions) + { + writer.WritePropertyName(item.Key); + writer.WriteObjectValue(item.Value, options); + } + writer.WriteEndObject(); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + ContentFieldSchema IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual ContentFieldSchema JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentFieldSchema)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContentFieldSchema(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static ContentFieldSchema DeserializeContentFieldSchema(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string description = default; + IDictionary fields = default; + IDictionary definitions = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("name"u8)) + { + name = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("description"u8)) + { + description = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("fields"u8)) + { + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + dictionary.Add(prop0.Name, ContentFieldDefinition.DeserializeContentFieldDefinition(prop0.Value, options)); + } + fields = dictionary; + continue; + } + if (prop.NameEquals("definitions"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + dictionary.Add(prop0.Name, ContentFieldDefinition.DeserializeContentFieldDefinition(prop0.Value, options)); + } + definitions = dictionary; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new ContentFieldSchema(name, description, fields, definitions ?? new ChangeTrackingDictionary(), additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ContentFieldSchema)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ContentFieldSchema IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual ContentFieldSchema PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeContentFieldSchema(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContentFieldSchema)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldSchema.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldSchema.cs new file mode 100644 index 000000000000..3e1c45d9d69d --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldSchema.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Schema of fields to be extracted from documents. + public partial class ContentFieldSchema + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// The fields defined in the schema. + /// is null. + public ContentFieldSchema(IDictionary fields) + { + Argument.AssertNotNull(fields, nameof(fields)); + + Fields = fields; + Definitions = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// The name of the field schema. + /// A description of the field schema. + /// The fields defined in the schema. + /// Additional definitions referenced by the fields in the schema. + /// Keeps track of any properties unknown to the library. + internal ContentFieldSchema(string name, string description, IDictionary fields, IDictionary definitions, IDictionary additionalBinaryDataProperties) + { + Name = name; + Description = description; + Fields = fields; + Definitions = definitions; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// The name of the field schema. + public string Name { get; set; } + + /// A description of the field schema. + public string Description { get; set; } + + /// The fields defined in the schema. + public IDictionary Fields { get; } + + /// Additional definitions referenced by the fields in the schema. + public IDictionary Definitions { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldType.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldType.cs new file mode 100644 index 000000000000..e37907300d11 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentFieldType.cs @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Semantic data type of the field value. + public readonly partial struct ContentFieldType : IEquatable + { + private readonly string _value; + /// Plain text. + private const string StringValue = "string"; + /// Date, normalized to ISO 8601 (YYYY-MM-DD) format. + private const string DateValue = "date"; + /// Time, normalized to ISO 8601 (hh:mm:ss) format. + private const string TimeValue = "time"; + /// Number as double precision floating point. + private const string NumberValue = "number"; + /// Integer as 64-bit signed integer. + private const string IntegerValue = "integer"; + /// Boolean value. + private const string BooleanValue = "boolean"; + /// List of subfields of the same type. 
+ private const string ArrayValue = "array"; + /// Named list of subfields. + private const string ObjectValue = "object"; + /// JSON object. + private const string JsonValue = "json"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public ContentFieldType(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Plain text. + public static ContentFieldType String { get; } = new ContentFieldType(StringValue); + + /// Date, normalized to ISO 8601 (YYYY-MM-DD) format. + public static ContentFieldType Date { get; } = new ContentFieldType(DateValue); + + /// Time, normalized to ISO 8601 (hh:mm:ss) format. + public static ContentFieldType Time { get; } = new ContentFieldType(TimeValue); + + /// Number as double precision floating point. + public static ContentFieldType Number { get; } = new ContentFieldType(NumberValue); + + /// Integer as 64-bit signed integer. + public static ContentFieldType Integer { get; } = new ContentFieldType(IntegerValue); + + /// Boolean value. + public static ContentFieldType Boolean { get; } = new ContentFieldType(BooleanValue); + + /// List of subfields of the same type. + public static ContentFieldType Array { get; } = new ContentFieldType(ArrayValue); + + /// Named list of subfields. + public static ContentFieldType Object { get; } = new ContentFieldType(ObjectValue); + + /// JSON object. + public static ContentFieldType Json { get; } = new ContentFieldType(JsonValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(ContentFieldType left, ContentFieldType right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(ContentFieldType left, ContentFieldType right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator ContentFieldType(string value) => new ContentFieldType(value); + + /// Converts a string to a . + /// The value. + public static implicit operator ContentFieldType?(string value) => value == null ? null : new ContentFieldType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ContentFieldType other && Equals(other); + + /// + public bool Equals(ContentFieldType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentSpan.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentSpan.Serialization.cs new file mode 100644 index 000000000000..442e147d305a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentSpan.Serialization.cs @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Position of the element in markdown, specified as a character offset and length. + public partial class ContentSpan : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal ContentSpan() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentSpan)} does not support writing '{format}' format."); + } + writer.WritePropertyName("offset"u8); + writer.WriteNumberValue(Offset); + writer.WritePropertyName("length"u8); + writer.WriteNumberValue(Length); + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + ContentSpan IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual ContentSpan JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentSpan)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContentSpan(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static ContentSpan DeserializeContentSpan(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int offset = default; + int length = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("offset"u8)) + { + offset = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("length"u8)) + { + length = prop.Value.GetInt32(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new ContentSpan(offset, length, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. 
+ BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ContentSpan)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ContentSpan IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual ContentSpan PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeContentSpan(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContentSpan)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentSpan.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentSpan.cs new file mode 100644 index 000000000000..4e78e6b6d7bd --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentSpan.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Position of the element in markdown, specified as a character offset and length. + public partial class ContentSpan + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Starting position (0-indexed) of the element in markdown, specified in characters. + /// Length of the element in markdown, specified in characters. + internal ContentSpan(int offset, int length) + { + Offset = offset; + Length = length; + } + + /// Initializes a new instance of . + /// Starting position (0-indexed) of the element in markdown, specified in characters. + /// Length of the element in markdown, specified in characters. + /// Keeps track of any properties unknown to the library. + internal ContentSpan(int offset, int length, IDictionary additionalBinaryDataProperties) + { + Offset = offset; + Length = length; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Starting position (0-indexed) of the element in markdown, specified in characters. + public int Offset { get; } + + /// Length of the element in markdown, specified in characters. 
+ public int Length { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentUnderstandingDefaults.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentUnderstandingDefaults.Serialization.cs new file mode 100644 index 000000000000..314613cfe6a1 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentUnderstandingDefaults.Serialization.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// default settings for this Content Understanding resource. + public partial class ContentUnderstandingDefaults : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal ContentUnderstandingDefaults() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentUnderstandingDefaults)} does not support writing '{format}' format."); + } + writer.WritePropertyName("modelDeployments"u8); + writer.WriteStartObject(); + foreach (var item in ModelDeployments) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item.Value); + } + writer.WriteEndObject(); + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + ContentUnderstandingDefaults IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual ContentUnderstandingDefaults JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentUnderstandingDefaults)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContentUnderstandingDefaults(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static ContentUnderstandingDefaults DeserializeContentUnderstandingDefaults(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IDictionary modelDeployments = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("modelDeployments"u8)) + { + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + if (prop0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(prop0.Name, null); + } + else + { + dictionary.Add(prop0.Name, prop0.Value.GetString()); + } + } + modelDeployments = dictionary; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new ContentUnderstandingDefaults(modelDeployments, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ContentUnderstandingDefaults)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ContentUnderstandingDefaults IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual ContentUnderstandingDefaults PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeContentUnderstandingDefaults(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContentUnderstandingDefaults)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// The to deserialize the from. 
+ public static explicit operator ContentUnderstandingDefaults(Response response) + { + using JsonDocument document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeContentUnderstandingDefaults(document.RootElement, ModelSerializationExtensions.WireOptions); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentUnderstandingDefaults.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentUnderstandingDefaults.cs new file mode 100644 index 000000000000..453b7846e317 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ContentUnderstandingDefaults.cs @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// default settings for this Content Understanding resource. + public partial class ContentUnderstandingDefaults + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// + /// Mapping of model names to deployments. + /// Ex. { "gpt-4.1": "myGpt41Deployment", "text-embedding-3-large": "myTextEmbedding3LargeDeployment" }. + /// + internal ContentUnderstandingDefaults(IDictionary modelDeployments) + { + ModelDeployments = modelDeployments; + } + + /// Initializes a new instance of . + /// + /// Mapping of model names to deployments. + /// Ex. { "gpt-4.1": "myGpt41Deployment", "text-embedding-3-large": "myTextEmbedding3LargeDeployment" }. + /// + /// Keeps track of any properties unknown to the library. + internal ContentUnderstandingDefaults(IDictionary modelDeployments, IDictionary additionalBinaryDataProperties) + { + ModelDeployments = modelDeployments; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// + /// Mapping of model names to deployments. + /// Ex. { "gpt-4.1": "myGpt41Deployment", "text-embedding-3-large": "myTextEmbedding3LargeDeployment" }. + /// + public IDictionary ModelDeployments { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAnalyzerRequest.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAnalyzerRequest.Serialization.cs new file mode 100644 index 000000000000..ced566ed5dd1 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAnalyzerRequest.Serialization.cs @@ -0,0 +1,177 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + /// The CopyAnalyzerRequest. + internal partial class CopyAnalyzerRequest : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal CopyAnalyzerRequest() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. 
+ /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CopyAnalyzerRequest)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(SourceAzureResourceId)) + { + writer.WritePropertyName("sourceAzureResourceId"u8); + writer.WriteStringValue(SourceAzureResourceId); + } + if (Optional.IsDefined(SourceRegion)) + { + writer.WritePropertyName("sourceRegion"u8); + writer.WriteStringValue(SourceRegion); + } + writer.WritePropertyName("sourceAnalyzerId"u8); + writer.WriteStringValue(SourceAnalyzerId); + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + CopyAnalyzerRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual CopyAnalyzerRequest JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CopyAnalyzerRequest)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCopyAnalyzerRequest(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static CopyAnalyzerRequest DeserializeCopyAnalyzerRequest(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string sourceAzureResourceId = default; + string sourceRegion = default; + string sourceAnalyzerId = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("sourceAzureResourceId"u8)) + { + sourceAzureResourceId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("sourceRegion"u8)) + { + sourceRegion = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("sourceAnalyzerId"u8)) + { + sourceAnalyzerId = prop.Value.GetString(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new CopyAnalyzerRequest(sourceAzureResourceId, sourceRegion, sourceAnalyzerId, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(CopyAnalyzerRequest)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + CopyAnalyzerRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual CopyAnalyzerRequest PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeCopyAnalyzerRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CopyAnalyzerRequest)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// The to serialize into . + public static implicit operator RequestContent(CopyAnalyzerRequest copyAnalyzerRequest) + { + if (copyAnalyzerRequest == null) + { + return null; + } + Utf8JsonRequestContent content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(copyAnalyzerRequest, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAnalyzerRequest.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAnalyzerRequest.cs new file mode 100644 index 000000000000..07fe702cadd0 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAnalyzerRequest.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// The CopyAnalyzerRequest. + internal partial class CopyAnalyzerRequest + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Source analyzer ID. + internal CopyAnalyzerRequest(string sourceAnalyzerId) + { + SourceAnalyzerId = sourceAnalyzerId; + } + + /// Initializes a new instance of . + /// Azure resource ID of the source analyzer location. Defaults to the current resource. + /// Azure region of the source analyzer location. Defaults to current region. + /// Source analyzer ID. + /// Keeps track of any properties unknown to the library. + internal CopyAnalyzerRequest(string sourceAzureResourceId, string sourceRegion, string sourceAnalyzerId, IDictionary additionalBinaryDataProperties) + { + SourceAzureResourceId = sourceAzureResourceId; + SourceRegion = sourceRegion; + SourceAnalyzerId = sourceAnalyzerId; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Azure resource ID of the source analyzer location. Defaults to the current resource. 
+ public string SourceAzureResourceId { get; } + + /// Azure region of the source analyzer location. Defaults to current region. + public string SourceRegion { get; } + + /// Source analyzer ID. + public string SourceAnalyzerId { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAuthorization.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAuthorization.Serialization.cs new file mode 100644 index 000000000000..9ebd0e47a152 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAuthorization.Serialization.cs @@ -0,0 +1,166 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// Copy authorization details for cross-resource copy. + public partial class CopyAuthorization : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal CopyAuthorization() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CopyAuthorization)} does not support writing '{format}' format."); + } + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + writer.WritePropertyName("targetAzureResourceId"u8); + writer.WriteStringValue(TargetAzureResourceId); + writer.WritePropertyName("expiresAt"u8); + writer.WriteStringValue(ExpiresAt, "O"); + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + CopyAuthorization IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual CopyAuthorization JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CopyAuthorization)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCopyAuthorization(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static CopyAuthorization DeserializeCopyAuthorization(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string source = default; + string targetAzureResourceId = default; + DateTimeOffset expiresAt = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("targetAzureResourceId"u8)) + { + targetAzureResourceId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("expiresAt"u8)) + { + expiresAt = prop.Value.GetDateTimeOffset("O"); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new CopyAuthorization(source, targetAzureResourceId, expiresAt, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(CopyAuthorization)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + CopyAuthorization IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual CopyAuthorization PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeCopyAuthorization(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CopyAuthorization)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// The to deserialize the from. + public static explicit operator CopyAuthorization(Response response) + { + using JsonDocument document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializeCopyAuthorization(document.RootElement, ModelSerializationExtensions.WireOptions); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAuthorization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAuthorization.cs new file mode 100644 index 000000000000..f05d4b87a3f4 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/CopyAuthorization.cs @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Copy authorization details for cross-resource copy. + public partial class CopyAuthorization + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Full path of the source analyzer. + /// Azure resource ID of the target location to copy to. + /// Date/time when the copy authorization expires. + internal CopyAuthorization(string source, string targetAzureResourceId, DateTimeOffset expiresAt) + { + Source = source; + TargetAzureResourceId = targetAzureResourceId; + ExpiresAt = expiresAt; + } + + /// Initializes a new instance of . + /// Full path of the source analyzer. + /// Azure resource ID of the target location to copy to. + /// Date/time when the copy authorization expires. + /// Keeps track of any properties unknown to the library. + internal CopyAuthorization(string source, string targetAzureResourceId, DateTimeOffset expiresAt, IDictionary additionalBinaryDataProperties) + { + Source = source; + TargetAzureResourceId = targetAzureResourceId; + ExpiresAt = expiresAt; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Full path of the source analyzer. + public string Source { get; } + + /// Azure resource ID of the target location to copy to. + public string TargetAzureResourceId { get; } + + /// Date/time when the copy authorization expires. + public DateTimeOffset ExpiresAt { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DateField.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DateField.Serialization.cs new file mode 100644 index 000000000000..350eac984a9a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DateField.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Date field extracted from the content. + public partial class DateField : ContentField, IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DateField)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(FieldType.ToString()); + if (Optional.IsDefined(ValueDate)) + { + writer.WritePropertyName("valueDate"u8); + writer.WriteStringValue(ValueDate.Value, "D"); + } + } + + /// The JSON reader. + /// The client options for reading and writing models. 
+ DateField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (DateField)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override ContentField JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DateField)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDateField(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DateField DeserializeDateField(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContentFieldType @type = default; + IList spans = default; + float? confidence = default; + string source = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + ContentFieldType fieldType = default; + DateTimeOffset? valueDate = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("type"u8)) + { + @type = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("type"u8)) + { + fieldType = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("valueDate"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + valueDate = prop.Value.GetDateTimeOffset("D"); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DateField( + @type, + spans ?? new ChangeTrackingList(), + confidence, + source, + additionalBinaryDataProperties, + fieldType, + valueDate); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DateField)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DateField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (DateField)PersistableModelCreateCore(data, options); + + /// The data to parse. 
+ /// The client options for reading and writing models. + protected override ContentField PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDateField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DateField)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DateField.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DateField.cs new file mode 100644 index 000000000000..725f475f5ff2 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DateField.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Date field extracted from the content. + public partial class DateField : ContentField + { + /// Initializes a new instance of . + internal DateField() : base(ContentFieldType.Date) + { + } + + /// Initializes a new instance of . + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Keeps track of any properties unknown to the library. + /// Semantic data type of the field value. + /// Date field value, in ISO 8601 (YYYY-MM-DD) format. + internal DateField(ContentFieldType @type, IList spans, float? confidence, string source, IDictionary additionalBinaryDataProperties, ContentFieldType fieldType, DateTimeOffset? valueDate) : base(@type, spans, confidence, source, additionalBinaryDataProperties) + { + FieldType = fieldType; + ValueDate = valueDate; + } + + /// Semantic data type of the field value. + internal ContentFieldType FieldType { get; set; } = "date"; + + /// Date field value, in ISO 8601 (YYYY-MM-DD) format. + public DateTimeOffset? ValueDate { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotation.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotation.Serialization.cs new file mode 100644 index 000000000000..7a3e92b9b065 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotation.Serialization.cs @@ -0,0 +1,299 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Annotation in a document, such as a strikethrough or a comment. + public partial class DocumentAnnotation : IJsonModel + { + /// Initializes a new instance of for deserialization. 
+ internal DocumentAnnotation() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentAnnotation)} does not support writing '{format}' format."); + } + writer.WritePropertyName("id"u8); + writer.WriteStringValue(Id); + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.ToString()); + if (Optional.IsCollectionDefined(Spans)) + { + writer.WritePropertyName("spans"u8); + writer.WriteStartArray(); + foreach (ContentSpan item in Spans) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsCollectionDefined(Comments)) + { + writer.WritePropertyName("comments"u8); + writer.WriteStartArray(); + foreach (DocumentAnnotationComment item in Comments) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(Author)) + { + writer.WritePropertyName("author"u8); + writer.WriteStringValue(Author); + } + if (Optional.IsDefined(CreatedAt)) + { + writer.WritePropertyName("createdAt"u8); + writer.WriteStringValue(CreatedAt.Value, "O"); + } + if (Optional.IsDefined(LastModifiedAt)) + { + writer.WritePropertyName("lastModifiedAt"u8); + writer.WriteStringValue(LastModifiedAt.Value, "O"); + } + if (Optional.IsCollectionDefined(Tags)) + { + writer.WritePropertyName("tags"u8); + writer.WriteStartArray(); + foreach (string item in Tags) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentAnnotation IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentAnnotation JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentAnnotation)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentAnnotation(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static DocumentAnnotation DeserializeDocumentAnnotation(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string id = default; + DocumentAnnotationKind kind = default; + IList spans = default; + string source = default; + IList comments = default; + string author = default; + DateTimeOffset? createdAt = default; + DateTimeOffset? lastModifiedAt = default; + IList tags = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("id"u8)) + { + id = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("kind"u8)) + { + kind = new DocumentAnnotationKind(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("comments"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentAnnotationComment.DeserializeDocumentAnnotationComment(item, options)); + } + comments = array; + continue; + } + if (prop.NameEquals("author"u8)) + { + author = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("createdAt"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + createdAt = prop.Value.GetDateTimeOffset("O"); + continue; + } + if (prop.NameEquals("lastModifiedAt"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lastModifiedAt = prop.Value.GetDateTimeOffset("O"); + continue; + } + if (prop.NameEquals("tags"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + tags = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentAnnotation( + id, + kind, + spans ?? new ChangeTrackingList(), + source, + comments ?? new ChangeTrackingList(), + author, + createdAt, + lastModifiedAt, + tags ?? new ChangeTrackingList(), + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentAnnotation)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. 
+ DocumentAnnotation IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentAnnotation PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentAnnotation(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentAnnotation)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotation.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotation.cs new file mode 100644 index 000000000000..51ce9f393ace --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotation.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Annotation in a document, such as a strikethrough or a comment. + public partial class DocumentAnnotation + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Annotation identifier. + /// Annotation kind. + internal DocumentAnnotation(string id, DocumentAnnotationKind kind) + { + Id = id; + Kind = kind; + Spans = new ChangeTrackingList(); + Comments = new ChangeTrackingList(); + Tags = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Annotation identifier. + /// Annotation kind. + /// Spans of the content associated with the annotation. + /// Position of the annotation. + /// Comments associated with the annotation. + /// Annotation author. + /// Date and time when the annotation was created. + /// Date and time when the annotation was last modified. + /// Tags associated with the annotation. + /// Keeps track of any properties unknown to the library. + internal DocumentAnnotation(string id, DocumentAnnotationKind kind, IList spans, string source, IList comments, string author, DateTimeOffset? createdAt, DateTimeOffset? lastModifiedAt, IList tags, IDictionary additionalBinaryDataProperties) + { + Id = id; + Kind = kind; + Spans = spans; + Source = source; + Comments = comments; + Author = author; + CreatedAt = createdAt; + LastModifiedAt = lastModifiedAt; + Tags = tags; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Annotation identifier. + public string Id { get; } + + /// Annotation kind. + public DocumentAnnotationKind Kind { get; } + + /// Spans of the content associated with the annotation. + public IList Spans { get; } + + /// Position of the annotation. + public string Source { get; } + + /// Comments associated with the annotation. + public IList Comments { get; } + + /// Annotation author. 
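// ---------------------------------------------------------------------------
// Illustrative sketch (editor addition, not part of the generated sources):
// DocumentAnnotation is a read-only output model, typically obtained from an
// analysis result (for example via DocumentContent.Annotations, declared later
// in this diff). The helper name below is hypothetical.
// ---------------------------------------------------------------------------
using System;
using Azure.AI.ContentUnderstanding;

internal static class AnnotationSample
{
    // Prints the annotation kind together with any reviewer comments.
    internal static void Describe(DocumentAnnotation annotation)
    {
        Console.WriteLine($"{annotation.Id}: {annotation.Kind} by {annotation.Author ?? "unknown"}");

        foreach (DocumentAnnotationComment comment in annotation.Comments)
        {
            Console.WriteLine($"  [{comment.CreatedAt:u}] {comment.Author}: {comment.Message}");
        }

        // Kind is an extensible enum; comparing against a known value is safe even
        // if the service later introduces new annotation kinds.
        if (annotation.Kind == DocumentAnnotationKind.Strikethrough)
        {
            Console.WriteLine("  The annotated content was struck through in the source document.");
        }
    }
}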
+ public string Author { get; } + + /// Date and time when the annotation was created. + public DateTimeOffset? CreatedAt { get; } + + /// Date and time when the annotation was last modified. + public DateTimeOffset? LastModifiedAt { get; } + + /// Tags associated with the annotation. + public IList Tags { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotationComment.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotationComment.Serialization.cs new file mode 100644 index 000000000000..cd223b920079 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotationComment.Serialization.cs @@ -0,0 +1,226 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Comment associated with a document annotation. + public partial class DocumentAnnotationComment : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentAnnotationComment() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentAnnotationComment)} does not support writing '{format}' format."); + } + writer.WritePropertyName("message"u8); + writer.WriteStringValue(Message); + if (Optional.IsDefined(Author)) + { + writer.WritePropertyName("author"u8); + writer.WriteStringValue(Author); + } + if (Optional.IsDefined(CreatedAt)) + { + writer.WritePropertyName("createdAt"u8); + writer.WriteStringValue(CreatedAt.Value, "O"); + } + if (Optional.IsDefined(LastModifiedAt)) + { + writer.WritePropertyName("lastModifiedAt"u8); + writer.WriteStringValue(LastModifiedAt.Value, "O"); + } + if (Optional.IsCollectionDefined(Tags)) + { + writer.WritePropertyName("tags"u8); + writer.WriteStartArray(); + foreach (string item in Tags) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentAnnotationComment IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. 
+ protected virtual DocumentAnnotationComment JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentAnnotationComment)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentAnnotationComment(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentAnnotationComment DeserializeDocumentAnnotationComment(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string message = default; + string author = default; + DateTimeOffset? createdAt = default; + DateTimeOffset? lastModifiedAt = default; + IList tags = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("message"u8)) + { + message = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("author"u8)) + { + author = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("createdAt"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + createdAt = prop.Value.GetDateTimeOffset("O"); + continue; + } + if (prop.NameEquals("lastModifiedAt"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + lastModifiedAt = prop.Value.GetDateTimeOffset("O"); + continue; + } + if (prop.NameEquals("tags"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + tags = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentAnnotationComment( + message, + author, + createdAt, + lastModifiedAt, + tags ?? new ChangeTrackingList(), + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentAnnotationComment)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentAnnotationComment IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentAnnotationComment PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentAnnotationComment(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentAnnotationComment)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotationComment.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotationComment.cs new file mode 100644 index 000000000000..e5b7bb17c25c --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotationComment.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Comment associated with a document annotation. + public partial class DocumentAnnotationComment + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Comment message in Markdown. + internal DocumentAnnotationComment(string message) + { + Message = message; + Tags = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Comment message in Markdown. + /// Author of the comment. + /// Date and time when the comment was created. + /// Date and time when the comment was last modified. + /// Tags associated with the comment. + /// Keeps track of any properties unknown to the library. + internal DocumentAnnotationComment(string message, string author, DateTimeOffset? createdAt, DateTimeOffset? lastModifiedAt, IList tags, IDictionary additionalBinaryDataProperties) + { + Message = message; + Author = author; + CreatedAt = createdAt; + LastModifiedAt = lastModifiedAt; + Tags = tags; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Comment message in Markdown. + public string Message { get; } + + /// Author of the comment. + public string Author { get; } + + /// Date and time when the comment was created. + public DateTimeOffset? CreatedAt { get; } + + /// Date and time when the comment was last modified. + public DateTimeOffset? LastModifiedAt { get; } + + /// Tags associated with the comment. + public IList Tags { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotationKind.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotationKind.cs new file mode 100644 index 000000000000..3ece21e969ae --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentAnnotationKind.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Document annotation kind. 
+ public readonly partial struct DocumentAnnotationKind : IEquatable + { + private readonly string _value; + /// Highlight annotation. + private const string HighlightValue = "highlight"; + /// Strikethrough annotation. + private const string StrikethroughValue = "strikethrough"; + /// Underline annotation. + private const string UnderlineValue = "underline"; + /// Italic annotation. + private const string ItalicValue = "italic"; + /// Bold annotation. + private const string BoldValue = "bold"; + /// Circle annotation. + private const string CircleValue = "circle"; + /// Note annotation. + private const string NoteValue = "note"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public DocumentAnnotationKind(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Highlight annotation. + public static DocumentAnnotationKind Highlight { get; } = new DocumentAnnotationKind(HighlightValue); + + /// Strikethrough annotation. + public static DocumentAnnotationKind Strikethrough { get; } = new DocumentAnnotationKind(StrikethroughValue); + + /// Underline annotation. + public static DocumentAnnotationKind Underline { get; } = new DocumentAnnotationKind(UnderlineValue); + + /// Italic annotation. + public static DocumentAnnotationKind Italic { get; } = new DocumentAnnotationKind(ItalicValue); + + /// Bold annotation. + public static DocumentAnnotationKind Bold { get; } = new DocumentAnnotationKind(BoldValue); + + /// Circle annotation. + public static DocumentAnnotationKind Circle { get; } = new DocumentAnnotationKind(CircleValue); + + /// Note annotation. + public static DocumentAnnotationKind Note { get; } = new DocumentAnnotationKind(NoteValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(DocumentAnnotationKind left, DocumentAnnotationKind right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(DocumentAnnotationKind left, DocumentAnnotationKind right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator DocumentAnnotationKind(string value) => new DocumentAnnotationKind(value); + + /// Converts a string to a . + /// The value. + public static implicit operator DocumentAnnotationKind?(string value) => value == null ? null : new DocumentAnnotationKind(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is DocumentAnnotationKind other && Equals(other); + + /// + public bool Equals(DocumentAnnotationKind other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentBarcode.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentBarcode.Serialization.cs new file mode 100644 index 000000000000..d901630f3651 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentBarcode.Serialization.cs @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Barcode in a document. + public partial class DocumentBarcode : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentBarcode() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentBarcode)} does not support writing '{format}' format."); + } + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.ToString()); + writer.WritePropertyName("value"u8); + writer.WriteStringValue(Value); + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (Optional.IsDefined(Confidence)) + { + writer.WritePropertyName("confidence"u8); + writer.WriteNumberValue(Confidence.Value); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentBarcode IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentBarcode JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentBarcode)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentBarcode(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentBarcode DeserializeDocumentBarcode(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + DocumentBarcodeKind kind = default; + string value = default; + string source = default; + ContentSpan span = default; + float? 
confidence = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("kind"u8)) + { + kind = new DocumentBarcodeKind(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("value"u8)) + { + value = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentBarcode( + kind, + value, + source, + span, + confidence, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentBarcode)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentBarcode IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentBarcode PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentBarcode(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentBarcode)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentBarcode.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentBarcode.cs new file mode 100644 index 000000000000..6561c08fb1c4 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentBarcode.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Barcode in a document. 
+ public partial class DocumentBarcode + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Barcode kind. + /// Barcode value. + internal DocumentBarcode(DocumentBarcodeKind kind, string value) + { + Kind = kind; + Value = value; + } + + /// Initializes a new instance of . + /// Barcode kind. + /// Barcode value. + /// Encoded source that identifies the position of the barcode in the content. + /// Span of the barcode in the markdown content. + /// Confidence of predicting the barcode. + /// Keeps track of any properties unknown to the library. + internal DocumentBarcode(DocumentBarcodeKind kind, string value, string source, ContentSpan span, float? confidence, IDictionary additionalBinaryDataProperties) + { + Kind = kind; + Value = value; + Source = source; + Span = span; + Confidence = confidence; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Barcode kind. + public DocumentBarcodeKind Kind { get; } + + /// Barcode value. + public string Value { get; } + + /// Encoded source that identifies the position of the barcode in the content. + public string Source { get; } + + /// Span of the barcode in the markdown content. + public ContentSpan Span { get; } + + /// Confidence of predicting the barcode. + public float? Confidence { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentBarcodeKind.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentBarcodeKind.cs new file mode 100644 index 000000000000..32b1e4f013db --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentBarcodeKind.cs @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Barcode kind. + public readonly partial struct DocumentBarcodeKind : IEquatable + { + private readonly string _value; + /// QR code, as defined in ISO/IEC 18004:2015. + private const string QRCodeValue = "QRCode"; + /// PDF417, as defined in ISO 15438. + private const string PDF417Value = "PDF417"; + /// GS1 12-digit Universal Product Code. + private const string UPCAValue = "UPCA"; + /// GS1 6-digit Universal Product Code. + private const string UPCEValue = "UPCE"; + /// Code 39 barcode, as defined in ISO/IEC 16388:2007. + private const string Code39Value = "Code39"; + /// Code 128 barcode, as defined in ISO/IEC 15417:2007. + private const string Code128Value = "Code128"; + /// GS1 8-digit International Article Number (European Article Number). + private const string EAN8Value = "EAN8"; + /// GS1 13-digit International Article Number (European Article Number). + private const string EAN13Value = "EAN13"; + /// GS1 DataBar barcode. + private const string DataBarValue = "DataBar"; + /// Code 93 barcode, as defined in ANSI/AIM BC5-1995. + private const string Code93Value = "Code93"; + /// Codabar barcode, as defined in ANSI/AIM BC3-1995. + private const string CodabarValue = "Codabar"; + /// GS1 DataBar Expanded barcode. + private const string DataBarExpandedValue = "DataBarExpanded"; + /// Interleaved 2 of 5 barcode, as defined in ANSI/AIM BC2-1995. + private const string ITFValue = "ITF"; + /// Micro QR code, as defined in ISO/IEC 23941:2022. 
+ private const string MicroQRCodeValue = "MicroQRCode"; + /// Aztec code, as defined in ISO/IEC 24778:2008. + private const string AztecValue = "Aztec"; + /// Data matrix code, as defined in ISO/IEC 16022:2006. + private const string DataMatrixValue = "DataMatrix"; + /// MaxiCode, as defined in ISO/IEC 16023:2000. + private const string MaxiCodeValue = "MaxiCode"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public DocumentBarcodeKind(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// QR code, as defined in ISO/IEC 18004:2015. + public static DocumentBarcodeKind QRCode { get; } = new DocumentBarcodeKind(QRCodeValue); + + /// PDF417, as defined in ISO 15438. + public static DocumentBarcodeKind PDF417 { get; } = new DocumentBarcodeKind(PDF417Value); + + /// GS1 12-digit Universal Product Code. + public static DocumentBarcodeKind UPCA { get; } = new DocumentBarcodeKind(UPCAValue); + + /// GS1 6-digit Universal Product Code. + public static DocumentBarcodeKind UPCE { get; } = new DocumentBarcodeKind(UPCEValue); + + /// Code 39 barcode, as defined in ISO/IEC 16388:2007. + public static DocumentBarcodeKind Code39 { get; } = new DocumentBarcodeKind(Code39Value); + + /// Code 128 barcode, as defined in ISO/IEC 15417:2007. + public static DocumentBarcodeKind Code128 { get; } = new DocumentBarcodeKind(Code128Value); + + /// GS1 8-digit International Article Number (European Article Number). + public static DocumentBarcodeKind EAN8 { get; } = new DocumentBarcodeKind(EAN8Value); + + /// GS1 13-digit International Article Number (European Article Number). + public static DocumentBarcodeKind EAN13 { get; } = new DocumentBarcodeKind(EAN13Value); + + /// GS1 DataBar barcode. + public static DocumentBarcodeKind DataBar { get; } = new DocumentBarcodeKind(DataBarValue); + + /// Code 93 barcode, as defined in ANSI/AIM BC5-1995. + public static DocumentBarcodeKind Code93 { get; } = new DocumentBarcodeKind(Code93Value); + + /// Codabar barcode, as defined in ANSI/AIM BC3-1995. + public static DocumentBarcodeKind Codabar { get; } = new DocumentBarcodeKind(CodabarValue); + + /// GS1 DataBar Expanded barcode. + public static DocumentBarcodeKind DataBarExpanded { get; } = new DocumentBarcodeKind(DataBarExpandedValue); + + /// Interleaved 2 of 5 barcode, as defined in ANSI/AIM BC2-1995. + public static DocumentBarcodeKind ITF { get; } = new DocumentBarcodeKind(ITFValue); + + /// Micro QR code, as defined in ISO/IEC 23941:2022. + public static DocumentBarcodeKind MicroQRCode { get; } = new DocumentBarcodeKind(MicroQRCodeValue); + + /// Aztec code, as defined in ISO/IEC 24778:2008. + public static DocumentBarcodeKind Aztec { get; } = new DocumentBarcodeKind(AztecValue); + + /// Data matrix code, as defined in ISO/IEC 16022:2006. + public static DocumentBarcodeKind DataMatrix { get; } = new DocumentBarcodeKind(DataMatrixValue); + + /// MaxiCode, as defined in ISO/IEC 16023:2000. + public static DocumentBarcodeKind MaxiCode { get; } = new DocumentBarcodeKind(MaxiCodeValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(DocumentBarcodeKind left, DocumentBarcodeKind right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. 
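// ---------------------------------------------------------------------------
// Illustrative sketch (editor addition, not part of the generated sources):
// DocumentBarcodeKind, like the other extensible enums in this library,
// converts implicitly from string and compares case-insensitively, so callers
// can match well-known kinds while still tolerating values the service adds
// later. The helper name below is hypothetical and assumes a DocumentBarcode
// obtained from an analysis result.
// ---------------------------------------------------------------------------
using System;
using Azure.AI.ContentUnderstanding;

internal static class BarcodeSample
{
    internal static void Describe(DocumentBarcode barcode)
    {
        // Well-known kinds are exposed as static properties; the string comparison
        // below also matches because equality is case-insensitive.
        if (barcode.Kind == DocumentBarcodeKind.QRCode || barcode.Kind == "qrcode")
        {
            Console.WriteLine($"QR code: {barcode.Value} (confidence {barcode.Confidence:0.00})");
        }
        else
        {
            // Unknown or newer kinds still carry their raw string value.
            Console.WriteLine($"Barcode of kind '{barcode.Kind}': {barcode.Value}");
        }
    }
}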
+ public static bool operator !=(DocumentBarcodeKind left, DocumentBarcodeKind right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator DocumentBarcodeKind(string value) => new DocumentBarcodeKind(value); + + /// Converts a string to a . + /// The value. + public static implicit operator DocumentBarcodeKind?(string value) => value == null ? null : new DocumentBarcodeKind(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is DocumentBarcodeKind other && Equals(other); + + /// + public bool Equals(DocumentBarcodeKind other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentCaption.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentCaption.Serialization.cs new file mode 100644 index 000000000000..2fe7431739d2 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentCaption.Serialization.cs @@ -0,0 +1,205 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Caption of a table or figure. + public partial class DocumentCaption : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentCaption() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentCaption)} does not support writing '{format}' format."); + } + writer.WritePropertyName("content"u8); + writer.WriteStringValue(Content); + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (Optional.IsCollectionDefined(Elements)) + { + writer.WritePropertyName("elements"u8); + writer.WriteStartArray(); + foreach (string item in Elements) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentCaption IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentCaption JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentCaption)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentCaption(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentCaption DeserializeDocumentCaption(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string content = default; + string source = default; + ContentSpan span = default; + IList elements = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("content"u8)) + { + content = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("elements"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + elements = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentCaption(content, source, span, elements ?? 
new ChangeTrackingList(), additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentCaption)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentCaption IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentCaption PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentCaption(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentCaption)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentCaption.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentCaption.cs new file mode 100644 index 000000000000..c0ed1c677de9 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentCaption.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Caption of a table or figure. + public partial class DocumentCaption + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Content of the caption. + internal DocumentCaption(string content) + { + Content = content; + Elements = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Content of the caption. + /// Encoded source that identifies the position of the caption in the content. + /// Span of the caption in the markdown content. + /// Child elements of the caption. + /// Keeps track of any properties unknown to the library. + internal DocumentCaption(string content, string source, ContentSpan span, IList elements, IDictionary additionalBinaryDataProperties) + { + Content = content; + Source = source; + Span = span; + Elements = elements; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Content of the caption. 
+ public string Content { get; } + + /// Encoded source that identifies the position of the caption in the content. + public string Source { get; } + + /// Span of the caption in the markdown content. + public ContentSpan Span { get; } + + /// Child elements of the caption. + public IList Elements { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentChartFigure.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentChartFigure.Serialization.cs new file mode 100644 index 000000000000..0057583bfc2b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentChartFigure.Serialization.cs @@ -0,0 +1,260 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Figure containing a chart, such as a bar chart, line chart, or pie chart. + public partial class DocumentChartFigure : DocumentFigure, IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentChartFigure() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentChartFigure)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("content"u8); + writer.WriteStartObject(); + foreach (var item in Content) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndObject(); + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentChartFigure IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (DocumentChartFigure)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override DocumentFigure JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentChartFigure)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentChartFigure(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
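// ---------------------------------------------------------------------------
// Illustrative sketch (editor addition, not part of the generated sources):
// DocumentChartFigure (declared further down in this diff) exposes the
// recovered chart as a dictionary of raw JSON values in a Chart.js-style
// configuration. The helper name is hypothetical, and keys such as "type" or
// "data" are assumptions about a typical Chart.js config rather than values
// guaranteed by the library.
// ---------------------------------------------------------------------------
using System;
using Azure.AI.ContentUnderstanding;

internal static class ChartFigureSample
{
    internal static void Dump(DocumentChartFigure chart)
    {
        // Each value is raw JSON (BinaryData); ToString() returns the JSON text,
        // which can be re-parsed or handed to a charting front end.
        foreach (var entry in chart.Content)
        {
            Console.WriteLine($"{entry.Key} = {entry.Value?.ToString() ?? "null"}");
        }
    }
}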
+ internal static DocumentChartFigure DeserializeDocumentChartFigure(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + DocumentFigureKind kind = default; + string id = default; + string source = default; + ContentSpan span = default; + IList elements = default; + DocumentCaption caption = default; + IList footnotes = default; + string description = default; + SemanticRole? role = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + IDictionary content = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("kind"u8)) + { + kind = new DocumentFigureKind(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("id"u8)) + { + id = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("elements"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + elements = array; + continue; + } + if (prop.NameEquals("caption"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + caption = DocumentCaption.DeserializeDocumentCaption(prop.Value, options); + continue; + } + if (prop.NameEquals("footnotes"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentFootnote.DeserializeDocumentFootnote(item, options)); + } + footnotes = array; + continue; + } + if (prop.NameEquals("description"u8)) + { + description = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("role"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + role = new SemanticRole(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("content"u8)) + { + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + if (prop0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(prop0.Name, null); + } + else + { + dictionary.Add(prop0.Name, BinaryData.FromString(prop0.Value.GetRawText())); + } + } + content = dictionary; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentChartFigure( + kind, + id, + source, + span, + elements ?? new ChangeTrackingList(), + caption, + footnotes ?? new ChangeTrackingList(), + description, + role, + additionalBinaryDataProperties, + content); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentChartFigure)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentChartFigure IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (DocumentChartFigure)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override DocumentFigure PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentChartFigure(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentChartFigure)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentChartFigure.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentChartFigure.cs new file mode 100644 index 000000000000..bcd1d033d3e1 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentChartFigure.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Figure containing a chart, such as a bar chart, line chart, or pie chart. + public partial class DocumentChartFigure : DocumentFigure + { + /// Initializes a new instance of . + /// Figure identifier. + /// Chart content represented using [Chart.js config](https://www.chartjs.org/docs/latest/configuration/). + internal DocumentChartFigure(string id, IDictionary content) : base(DocumentFigureKind.Chart, id) + { + Content = content; + } + + /// Initializes a new instance of . + /// Figure kind. + /// Figure identifier. + /// Encoded source that identifies the position of the figure in the content. + /// Span of the figure in the markdown content. + /// Child elements of the figure, excluding any caption or footnotes. + /// Figure caption. + /// List of figure footnotes. + /// Description of the figure. + /// Semantic role of the figure. + /// Keeps track of any properties unknown to the library. + /// Chart content represented using [Chart.js config](https://www.chartjs.org/docs/latest/configuration/). + internal DocumentChartFigure(DocumentFigureKind kind, string id, string source, ContentSpan span, IList elements, DocumentCaption caption, IList footnotes, string description, SemanticRole? 
role, IDictionary additionalBinaryDataProperties, IDictionary content) : base(kind, id, source, span, elements, caption, footnotes, description, role, additionalBinaryDataProperties) + { + Content = content; + } + + /// + /// Chart content represented using [Chart.js config](https://www.chartjs.org/docs/latest/configuration/). + /// To assign an object to the value of this property use . + /// To assign an already formatted json string to this property use . + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo"). + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\""). + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }). + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}"). + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public IDictionary Content { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContent.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContent.Serialization.cs new file mode 100644 index 000000000000..d3f86a00b98f --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContent.Serialization.cs @@ -0,0 +1,421 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Document content. Ex. text/plain, application/pdf, image/jpeg. + public partial class DocumentContent : MediaContent, IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentContent() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentContent)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("startPageNumber"u8); + writer.WriteNumberValue(StartPageNumber); + writer.WritePropertyName("endPageNumber"u8); + writer.WriteNumberValue(EndPageNumber); + if (Optional.IsDefined(Unit)) + { + writer.WritePropertyName("unit"u8); + writer.WriteStringValue(Unit.Value.ToString()); + } + if (Optional.IsCollectionDefined(Pages)) + { + writer.WritePropertyName("pages"u8); + writer.WriteStartArray(); + foreach (DocumentPage item in Pages) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Paragraphs)) + { + writer.WritePropertyName("paragraphs"u8); + writer.WriteStartArray(); + foreach (DocumentParagraph item in Paragraphs) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Sections)) + { + writer.WritePropertyName("sections"u8); + writer.WriteStartArray(); + foreach (DocumentSection item in Sections) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Tables)) + { + writer.WritePropertyName("tables"u8); + writer.WriteStartArray(); + foreach (DocumentTable item in Tables) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Figures)) + { + writer.WritePropertyName("figures"u8); + writer.WriteStartArray(); + foreach (DocumentFigure item in Figures) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Annotations)) + { + writer.WritePropertyName("annotations"u8); + writer.WriteStartArray(); + foreach (DocumentAnnotation item in Annotations) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Hyperlinks)) + { + writer.WritePropertyName("hyperlinks"u8); + writer.WriteStartArray(); + foreach (DocumentHyperlink item in Hyperlinks) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Segments)) + { + writer.WritePropertyName("segments"u8); + writer.WriteStartArray(); + foreach (DocumentContentSegment item in Segments) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (DocumentContent)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override MediaContent JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentContent)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentContent(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
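// ---------------------------------------------------------------------------
// Illustrative sketch (editor addition, not part of the generated sources):
// DocumentContent aggregates the structural elements serialized above (pages,
// paragraphs, sections, tables, figures, annotations, hyperlinks, segments).
// The sketch assumes these collections are exposed as public read-only
// properties, consistent with the other generated output models in this diff,
// and that the instance comes from a completed analyze operation. The helper
// name is hypothetical.
// ---------------------------------------------------------------------------
using System;
using Azure.AI.ContentUnderstanding;

internal static class DocumentContentSample
{
    internal static void Summarize(DocumentContent document)
    {
        Console.WriteLine($"Pages {document.StartPageNumber}-{document.EndPageNumber}");
        Console.WriteLine($"  {document.Pages.Count} page(s), {document.Tables.Count} table(s), {document.Figures.Count} figure(s)");
        Console.WriteLine($"  {document.Annotations.Count} annotation(s), {document.Hyperlinks.Count} hyperlink(s)");
    }
}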
+ internal static DocumentContent DeserializeDocumentContent(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + MediaContentKind kind = default; + string mimeType = default; + string analyzerId = default; + string category = default; + string path = default; + string markdown = default; + IDictionary fields = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + int startPageNumber = default; + int endPageNumber = default; + LengthUnit? unit = default; + IList pages = default; + IList paragraphs = default; + IList sections = default; + IList tables = default; + IList figures = default; + IList annotations = default; + IList hyperlinks = default; + IList segments = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("kind"u8)) + { + kind = new MediaContentKind(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("mimeType"u8)) + { + mimeType = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("analyzerId"u8)) + { + analyzerId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("category"u8)) + { + category = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("path"u8)) + { + path = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("markdown"u8)) + { + markdown = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("fields"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + dictionary.Add(prop0.Name, ContentField.DeserializeContentField(prop0.Value, options)); + } + fields = dictionary; + continue; + } + if (prop.NameEquals("startPageNumber"u8)) + { + startPageNumber = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("endPageNumber"u8)) + { + endPageNumber = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("unit"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + unit = new LengthUnit(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("pages"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentPage.DeserializeDocumentPage(item, options)); + } + pages = array; + continue; + } + if (prop.NameEquals("paragraphs"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentParagraph.DeserializeDocumentParagraph(item, options)); + } + paragraphs = array; + continue; + } + if (prop.NameEquals("sections"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentSection.DeserializeDocumentSection(item, options)); + } + sections = array; + continue; + } + if (prop.NameEquals("tables"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentTable.DeserializeDocumentTable(item, options)); + } + tables = array; + continue; + } + if (prop.NameEquals("figures"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + 
{ + array.Add(DocumentFigure.DeserializeDocumentFigure(item, options)); + } + figures = array; + continue; + } + if (prop.NameEquals("annotations"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentAnnotation.DeserializeDocumentAnnotation(item, options)); + } + annotations = array; + continue; + } + if (prop.NameEquals("hyperlinks"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentHyperlink.DeserializeDocumentHyperlink(item, options)); + } + hyperlinks = array; + continue; + } + if (prop.NameEquals("segments"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentContentSegment.DeserializeDocumentContentSegment(item, options)); + } + segments = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentContent( + kind, + mimeType, + analyzerId, + category, + path, + markdown, + fields ?? new ChangeTrackingDictionary(), + additionalBinaryDataProperties, + startPageNumber, + endPageNumber, + unit, + pages ?? new ChangeTrackingList(), + paragraphs ?? new ChangeTrackingList(), + sections ?? new ChangeTrackingList(), + tables ?? new ChangeTrackingList(), + figures ?? new ChangeTrackingList(), + annotations ?? new ChangeTrackingList(), + hyperlinks ?? new ChangeTrackingList(), + segments ?? new ChangeTrackingList()); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentContent)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentContent IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (DocumentContent)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override MediaContent PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentContent(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentContent)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
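Because `DocumentContent` implements the `IJsonModel`/`IPersistableModel` pattern shown above, it round-trips through `ModelReaderWriter` from `System.ClientModel.Primitives`. The following is a minimal editorial sketch, not part of the generated file; the input path is a placeholder, and only properties defined in this PR (`StartPageNumber`, `EndPageNumber`, `Unit`, `Pages`, `Tables`, `Figures`) are used.

```C#
using System;
using System.ClientModel.Primitives;
using System.IO;

// Load previously captured JSON for a document content payload (placeholder path).
BinaryData json = BinaryData.FromString(File.ReadAllText("document_content.json"));

// Deserialize through the IJsonModel/IPersistableModel implementation above.
DocumentContent content = ModelReaderWriter.Read<DocumentContent>(json);

Console.WriteLine($"Pages {content.StartPageNumber}-{content.EndPageNumber} (unit: {content.Unit})");
Console.WriteLine($"Page objects: {content.Pages.Count}, tables: {content.Tables.Count}, figures: {content.Figures.Count}");

// Serialize back; "J" (JSON) is the only wire format these models support.
BinaryData roundTripped = ModelReaderWriter.Write(content);
```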
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContent.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContent.cs new file mode 100644 index 000000000000..0e2d69bf1cbc --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContent.cs @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Document content. Ex. text/plain, application/pdf, image/jpeg. + public partial class DocumentContent : MediaContent + { + /// Initializes a new instance of . + /// Detected MIME type of the content. Ex. application/pdf, image/jpeg, etc. + /// Start page number (1-indexed) of the content. + /// End page number (1-indexed) of the content. + internal DocumentContent(string mimeType, int startPageNumber, int endPageNumber) : base(MediaContentKind.Document, mimeType) + { + StartPageNumber = startPageNumber; + EndPageNumber = endPageNumber; + Pages = new ChangeTrackingList(); + Paragraphs = new ChangeTrackingList(); + Sections = new ChangeTrackingList(); + Tables = new ChangeTrackingList(); + Figures = new ChangeTrackingList(); + Annotations = new ChangeTrackingList(); + Hyperlinks = new ChangeTrackingList(); + Segments = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Content kind. + /// Detected MIME type of the content. Ex. application/pdf, image/jpeg, etc. + /// The analyzer that generated this content. + /// Classified content category. + /// The path of the content in the input. + /// Markdown representation of the content. + /// Extracted fields from the content. + /// Keeps track of any properties unknown to the library. + /// Start page number (1-indexed) of the content. + /// End page number (1-indexed) of the content. + /// + /// Length unit used by the width, height, and source properties. + /// For images/tiff, the default unit is pixel. For PDF, the default unit is inch. + /// + /// List of pages in the document. + /// List of paragraphs in the document. Only if enableOcr and returnDetails are true. + /// List of sections in the document. Only if enableLayout and returnDetails are true. + /// List of tables in the document. Only if enableLayout and returnDetails are true. + /// List of figures in the document. Only if enableLayout and returnDetails are true. + /// List of annotations in the document. Only if enableAnnotations and returnDetails are true. + /// List of hyperlinks in the document. Only if returnDetails are true. + /// List of detected content segments. Only if enableSegment is true. + internal DocumentContent(MediaContentKind kind, string mimeType, string analyzerId, string category, string path, string markdown, IDictionary fields, IDictionary additionalBinaryDataProperties, int startPageNumber, int endPageNumber, LengthUnit? 
unit, IList pages, IList paragraphs, IList sections, IList tables, IList figures, IList annotations, IList hyperlinks, IList segments) : base(kind, mimeType, analyzerId, category, path, markdown, fields, additionalBinaryDataProperties) + { + StartPageNumber = startPageNumber; + EndPageNumber = endPageNumber; + Unit = unit; + Pages = pages; + Paragraphs = paragraphs; + Sections = sections; + Tables = tables; + Figures = figures; + Annotations = annotations; + Hyperlinks = hyperlinks; + Segments = segments; + } + + /// Start page number (1-indexed) of the content. + public int StartPageNumber { get; } + + /// End page number (1-indexed) of the content. + public int EndPageNumber { get; } + + /// + /// Length unit used by the width, height, and source properties. + /// For images/tiff, the default unit is pixel. For PDF, the default unit is inch. + /// + public LengthUnit? Unit { get; } + + /// List of pages in the document. + public IList Pages { get; } + + /// List of paragraphs in the document. Only if enableOcr and returnDetails are true. + public IList Paragraphs { get; } + + /// List of sections in the document. Only if enableLayout and returnDetails are true. + public IList Sections { get; } + + /// List of tables in the document. Only if enableLayout and returnDetails are true. + public IList Tables { get; } + + /// List of figures in the document. Only if enableLayout and returnDetails are true. + public IList Figures { get; } + + /// List of annotations in the document. Only if enableAnnotations and returnDetails are true. + public IList Annotations { get; } + + /// List of hyperlinks in the document. Only if returnDetails are true. + public IList Hyperlinks { get; } + + /// List of detected content segments. Only if enableSegment is true. + public IList Segments { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContentSegment.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContentSegment.Serialization.cs new file mode 100644 index 000000000000..a36f64c5758f --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContentSegment.Serialization.cs @@ -0,0 +1,180 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Detected document content segment. + public partial class DocumentContentSegment : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentContentSegment() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentContentSegment)} does not support writing '{format}' format."); + } + writer.WritePropertyName("segmentId"u8); + writer.WriteStringValue(SegmentId); + writer.WritePropertyName("category"u8); + writer.WriteStringValue(Category); + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + writer.WritePropertyName("startPageNumber"u8); + writer.WriteNumberValue(StartPageNumber); + writer.WritePropertyName("endPageNumber"u8); + writer.WriteNumberValue(EndPageNumber); + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentContentSegment IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentContentSegment JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentContentSegment)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentContentSegment(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentContentSegment DeserializeDocumentContentSegment(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string segmentId = default; + string category = default; + ContentSpan span = default; + int startPageNumber = default; + int endPageNumber = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("segmentId"u8)) + { + segmentId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("category"u8)) + { + category = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("startPageNumber"u8)) + { + startPageNumber = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("endPageNumber"u8)) + { + endPageNumber = prop.Value.GetInt32(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentContentSegment( + segmentId, + category, + span, + startPageNumber, + endPageNumber, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. 
+ protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentContentSegment)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentContentSegment IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentContentSegment PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentContentSegment(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentContentSegment)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContentSegment.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContentSegment.cs new file mode 100644 index 000000000000..9d81f60f4721 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentContentSegment.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Detected document content segment. + public partial class DocumentContentSegment + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Segment identifier. + /// Classified content category. + /// Span of the segment in the markdown content. + /// Start page number (1-indexed) of the segment. + /// End page number (1-indexed) of the segment. + internal DocumentContentSegment(string segmentId, string category, ContentSpan span, int startPageNumber, int endPageNumber) + { + SegmentId = segmentId; + Category = category; + Span = span; + StartPageNumber = startPageNumber; + EndPageNumber = endPageNumber; + } + + /// Initializes a new instance of . + /// Segment identifier. + /// Classified content category. + /// Span of the segment in the markdown content. + /// Start page number (1-indexed) of the segment. + /// End page number (1-indexed) of the segment. + /// Keeps track of any properties unknown to the library. 
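Once populated, the segment model lends itself to simple grouping and reporting. A minimal sketch, assuming `content` is an already deserialized `DocumentContent` and that segmentation was enabled on the analyzer (otherwise `Segments` is empty); only properties defined in this PR are used.

```C#
using System;
using System.Linq;

static void PrintSegments(DocumentContent content)
{
    // Segments are only returned when segmentation is enabled on the analyzer.
    foreach (var group in content.Segments.GroupBy(s => s.Category))
    {
        Console.WriteLine($"Category '{group.Key}':");
        foreach (DocumentContentSegment segment in group)
        {
            Console.WriteLine($"  {segment.SegmentId}: pages {segment.StartPageNumber}-{segment.EndPageNumber}");
        }
    }
}
```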
+ internal DocumentContentSegment(string segmentId, string category, ContentSpan span, int startPageNumber, int endPageNumber, IDictionary additionalBinaryDataProperties) + { + SegmentId = segmentId; + Category = category; + Span = span; + StartPageNumber = startPageNumber; + EndPageNumber = endPageNumber; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Segment identifier. + public string SegmentId { get; } + + /// Classified content category. + public string Category { get; } + + /// Span of the segment in the markdown content. + public ContentSpan Span { get; } + + /// Start page number (1-indexed) of the segment. + public int StartPageNumber { get; } + + /// End page number (1-indexed) of the segment. + public int EndPageNumber { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFigure.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFigure.Serialization.cs new file mode 100644 index 000000000000..9cac9fb8299a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFigure.Serialization.cs @@ -0,0 +1,193 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Figure in a document. + /// Please note this is the abstract base class. The derived classes available for instantiation are: and . + /// + [PersistableModelProxy(typeof(UnknownDocumentFigure))] + public abstract partial class DocumentFigure : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentFigure() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentFigure)} does not support writing '{format}' format."); + } + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.ToString()); + writer.WritePropertyName("id"u8); + writer.WriteStringValue(Id); + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (Optional.IsCollectionDefined(Elements)) + { + writer.WritePropertyName("elements"u8); + writer.WriteStartArray(); + foreach (string item in Elements) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(Caption)) + { + writer.WritePropertyName("caption"u8); + writer.WriteObjectValue(Caption, options); + } + if (Optional.IsCollectionDefined(Footnotes)) + { + writer.WritePropertyName("footnotes"u8); + writer.WriteStartArray(); + foreach (DocumentFootnote item in Footnotes) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + if (Optional.IsDefined(Role)) + { + writer.WritePropertyName("role"u8); + writer.WriteStringValue(Role.Value.ToString()); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentFigure IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentFigure JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentFigure)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentFigure(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentFigure DeserializeDocumentFigure(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("kind"u8, out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "chart": + return DocumentChartFigure.DeserializeDocumentChartFigure(element, options); + case "mermaid": + return DocumentMermaidFigure.DeserializeDocumentMermaidFigure(element, options); + } + } + return UnknownDocumentFigure.DeserializeUnknownDocumentFigure(element, options); + } + + /// The client options for reading and writing models. 
+ BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentFigure)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentFigure IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentFigure PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentFigure(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentFigure)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFigure.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFigure.cs new file mode 100644 index 000000000000..f396bfa793b0 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFigure.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Figure in a document. + /// Please note this is the abstract base class. The derived classes available for instantiation are: and . + /// + public abstract partial class DocumentFigure + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Figure kind. + /// Figure identifier. + private protected DocumentFigure(DocumentFigureKind kind, string id) + { + Kind = kind; + Id = id; + Elements = new ChangeTrackingList(); + Footnotes = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Figure kind. + /// Figure identifier. + /// Encoded source that identifies the position of the figure in the content. + /// Span of the figure in the markdown content. + /// Child elements of the figure, excluding any caption or footnotes. + /// Figure caption. + /// List of figure footnotes. + /// Description of the figure. + /// Semantic role of the figure. + /// Keeps track of any properties unknown to the library. 
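`DocumentFigure` is resolved polymorphically through the `kind` discriminator: "chart" maps to `DocumentChartFigure`, "mermaid" to `DocumentMermaidFigure`, and anything else to an unknown placeholder type. A minimal consumption sketch, assuming `content` is a `DocumentContent` produced with layout enabled; the chart `Content` dictionary holds the Chart.js-style configuration described earlier in this diff.

```C#
using System;

static void DescribeFigures(DocumentContent content)
{
    foreach (DocumentFigure figure in content.Figures)
    {
        // Base properties are available regardless of the concrete kind.
        Console.WriteLine($"{figure.Id}: {figure.Description}");

        switch (figure)
        {
            case DocumentChartFigure chart:
                // Chart figures expose a Chart.js-style config as a dictionary.
                Console.WriteLine($"  chart config entries: {chart.Content.Count}");
                break;
            case DocumentMermaidFigure:
                Console.WriteLine("  mermaid diagram");
                break;
        }
    }
}
```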
+ internal DocumentFigure(DocumentFigureKind kind, string id, string source, ContentSpan span, IList elements, DocumentCaption caption, IList footnotes, string description, SemanticRole? role, IDictionary additionalBinaryDataProperties) + { + Kind = kind; + Id = id; + Source = source; + Span = span; + Elements = elements; + Caption = caption; + Footnotes = footnotes; + Description = description; + Role = role; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Figure kind. + internal DocumentFigureKind Kind { get; set; } + + /// Figure identifier. + public string Id { get; } + + /// Encoded source that identifies the position of the figure in the content. + public string Source { get; } + + /// Span of the figure in the markdown content. + public ContentSpan Span { get; } + + /// Child elements of the figure, excluding any caption or footnotes. + public IList Elements { get; } + + /// Figure caption. + public DocumentCaption Caption { get; } + + /// List of figure footnotes. + public IList Footnotes { get; } + + /// Description of the figure. + public string Description { get; } + + /// Semantic role of the figure. + public SemanticRole? Role { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFigureKind.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFigureKind.cs new file mode 100644 index 000000000000..fb9fc4f1f5d2 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFigureKind.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Figure kind. + internal readonly partial struct DocumentFigureKind : IEquatable + { + private readonly string _value; + /// Unknown figure kind. + private const string UnknownValue = "unknown"; + /// Figure containing a chart, such as a bar chart, line chart, or pie chart. + private const string ChartValue = "chart"; + /// Figure containing a diagram, such as a flowchart or network diagram. + private const string MermaidValue = "mermaid"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public DocumentFigureKind(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Unknown figure kind. + public static DocumentFigureKind Unknown { get; } = new DocumentFigureKind(UnknownValue); + + /// Figure containing a chart, such as a bar chart, line chart, or pie chart. + public static DocumentFigureKind Chart { get; } = new DocumentFigureKind(ChartValue); + + /// Figure containing a diagram, such as a flowchart or network diagram. + public static DocumentFigureKind Mermaid { get; } = new DocumentFigureKind(MermaidValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(DocumentFigureKind left, DocumentFigureKind right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(DocumentFigureKind left, DocumentFigureKind right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator DocumentFigureKind(string value) => new DocumentFigureKind(value); + + /// Converts a string to a . 
+ /// The value. + public static implicit operator DocumentFigureKind?(string value) => value == null ? null : new DocumentFigureKind(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is DocumentFigureKind other && Equals(other); + + /// + public bool Equals(DocumentFigureKind other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFootnote.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFootnote.Serialization.cs new file mode 100644 index 000000000000..808809e3445f --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFootnote.Serialization.cs @@ -0,0 +1,205 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Footnote of a table or figure. + public partial class DocumentFootnote : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentFootnote() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentFootnote)} does not support writing '{format}' format."); + } + writer.WritePropertyName("content"u8); + writer.WriteStringValue(Content); + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (Optional.IsCollectionDefined(Elements)) + { + writer.WritePropertyName("elements"u8); + writer.WriteStartArray(); + foreach (string item in Elements) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentFootnote IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. 
+ /// The client options for reading and writing models. + protected virtual DocumentFootnote JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentFootnote)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentFootnote(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentFootnote DeserializeDocumentFootnote(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string content = default; + string source = default; + ContentSpan span = default; + IList elements = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("content"u8)) + { + content = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("elements"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + elements = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentFootnote(content, source, span, elements ?? new ChangeTrackingList(), additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentFootnote)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentFootnote IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentFootnote PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentFootnote(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentFootnote)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFootnote.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFootnote.cs new file mode 100644 index 000000000000..40fa1433fb75 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFootnote.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Footnote of a table or figure. + public partial class DocumentFootnote + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Content of the footnote. + internal DocumentFootnote(string content) + { + Content = content; + Elements = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Content of the footnote. + /// Encoded source that identifies the position of the footnote in the content. + /// Span of the footnote in the markdown content. + /// Child elements of the footnote. + /// Keeps track of any properties unknown to the library. + internal DocumentFootnote(string content, string source, ContentSpan span, IList elements, IDictionary additionalBinaryDataProperties) + { + Content = content; + Source = source; + Span = span; + Elements = elements; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Content of the footnote. + public string Content { get; } + + /// Encoded source that identifies the position of the footnote in the content. + public string Source { get; } + + /// Span of the footnote in the markdown content. + public ContentSpan Span { get; } + + /// Child elements of the footnote. + public IList Elements { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFormula.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFormula.Serialization.cs new file mode 100644 index 000000000000..bbadc85d82b4 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFormula.Serialization.cs @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Mathematical formula in a document. + public partial class DocumentFormula : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentFormula() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. 
+ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentFormula)} does not support writing '{format}' format."); + } + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.ToString()); + writer.WritePropertyName("value"u8); + writer.WriteStringValue(Value); + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (Optional.IsDefined(Confidence)) + { + writer.WritePropertyName("confidence"u8); + writer.WriteNumberValue(Confidence.Value); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentFormula IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentFormula JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentFormula)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentFormula(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentFormula DeserializeDocumentFormula(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + DocumentFormulaKind kind = default; + string value = default; + string source = default; + ContentSpan span = default; + float? 
confidence = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("kind"u8)) + { + kind = new DocumentFormulaKind(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("value"u8)) + { + value = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentFormula( + kind, + value, + source, + span, + confidence, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentFormula)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentFormula IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentFormula PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentFormula(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentFormula)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFormula.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFormula.cs new file mode 100644 index 000000000000..da55c56eda71 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFormula.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Mathematical formula in a document. 
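`DocumentFormula` pairs the recognized LaTeX expression (`Value`) with an optional recognition `Confidence` and a `Kind` of either `Inline` or `Display`. A small sketch of how formulas might be filtered; where the formula collection is exposed is not shown in this file, so the method simply takes an `IEnumerable<DocumentFormula>`.

```C#
using System;
using System.Collections.Generic;
using System.Linq;

// `formulas` would come from the analysis result; the containing property is not shown here.
static void PrintDisplayFormulas(IEnumerable<DocumentFormula> formulas, float minConfidence = 0.8f)
{
    foreach (DocumentFormula formula in formulas.Where(f => f.Confidence is null || f.Confidence >= minConfidence))
    {
        string layout = formula.Kind == DocumentFormulaKind.Display ? "display" : "inline";
        Console.WriteLine($"[{layout}] {formula.Value}"); // Value holds the LaTeX expression.
    }
}
```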
+ public partial class DocumentFormula + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Formula kind. + /// LaTex expression describing the formula. + internal DocumentFormula(DocumentFormulaKind kind, string value) + { + Kind = kind; + Value = value; + } + + /// Initializes a new instance of . + /// Formula kind. + /// LaTex expression describing the formula. + /// Encoded source that identifies the position of the formula in the content. + /// Span of the formula in the markdown content. + /// Confidence of predicting the formula. + /// Keeps track of any properties unknown to the library. + internal DocumentFormula(DocumentFormulaKind kind, string value, string source, ContentSpan span, float? confidence, IDictionary additionalBinaryDataProperties) + { + Kind = kind; + Value = value; + Source = source; + Span = span; + Confidence = confidence; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Formula kind. + public DocumentFormulaKind Kind { get; } + + /// LaTex expression describing the formula. + public string Value { get; } + + /// Encoded source that identifies the position of the formula in the content. + public string Source { get; } + + /// Span of the formula in the markdown content. + public ContentSpan Span { get; } + + /// Confidence of predicting the formula. + public float? Confidence { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFormulaKind.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFormulaKind.cs new file mode 100644 index 000000000000..25b98d8fe46e --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentFormulaKind.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Formula kind. + public readonly partial struct DocumentFormulaKind : IEquatable + { + private readonly string _value; + /// A formula embedded within the content of a paragraph. + private const string InlineValue = "inline"; + /// A formula in display mode that takes up an entire line. + private const string DisplayValue = "display"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public DocumentFormulaKind(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// A formula embedded within the content of a paragraph. + public static DocumentFormulaKind Inline { get; } = new DocumentFormulaKind(InlineValue); + + /// A formula in display mode that takes up an entire line. + public static DocumentFormulaKind Display { get; } = new DocumentFormulaKind(DisplayValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(DocumentFormulaKind left, DocumentFormulaKind right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(DocumentFormulaKind left, DocumentFormulaKind right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. 
+ public static implicit operator DocumentFormulaKind(string value) => new DocumentFormulaKind(value); + + /// Converts a string to a . + /// The value. + public static implicit operator DocumentFormulaKind?(string value) => value == null ? null : new DocumentFormulaKind(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is DocumentFormulaKind other && Equals(other); + + /// + public bool Equals(DocumentFormulaKind other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentHyperlink.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentHyperlink.Serialization.cs new file mode 100644 index 000000000000..c11144b63c35 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentHyperlink.Serialization.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Hyperlink in a document, such as a link to a web page or an email address. + public partial class DocumentHyperlink : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentHyperlink() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentHyperlink)} does not support writing '{format}' format."); + } + writer.WritePropertyName("content"u8); + writer.WriteStringValue(Content); + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Url); + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentHyperlink IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. 
+ /// The client options for reading and writing models. + protected virtual DocumentHyperlink JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentHyperlink)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentHyperlink(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentHyperlink DeserializeDocumentHyperlink(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string content = default; + string url = default; + ContentSpan span = default; + string source = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("content"u8)) + { + content = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("url"u8)) + { + url = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentHyperlink(content, url, span, source, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentHyperlink)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentHyperlink IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentHyperlink PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentHyperlink(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentHyperlink)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
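Hyperlinks surface on `DocumentContent.Hyperlinks` when `returnDetails` is enabled. A short sketch of listing them, assuming `content` is an already deserialized `DocumentContent`:

```C#
using System;

static void PrintHyperlinks(DocumentContent content)
{
    // Hyperlinks are only populated when returnDetails is enabled.
    foreach (DocumentHyperlink link in content.Hyperlinks)
    {
        Console.WriteLine($"{link.Content} -> {link.Url}");
    }
}
```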
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentHyperlink.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentHyperlink.cs new file mode 100644 index 000000000000..1160a01689c8 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentHyperlink.cs @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Hyperlink in a document, such as a link to a web page or an email address. + public partial class DocumentHyperlink + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Hyperlinked content. + /// URL of the hyperlink. + internal DocumentHyperlink(string content, string url) + { + Content = content; + Url = url; + } + + /// Initializes a new instance of . + /// Hyperlinked content. + /// URL of the hyperlink. + /// Span of the hyperlink in the markdown content. + /// Position of the hyperlink. + /// Keeps track of any properties unknown to the library. + internal DocumentHyperlink(string content, string url, ContentSpan span, string source, IDictionary additionalBinaryDataProperties) + { + Content = content; + Url = url; + Span = span; + Source = source; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Hyperlinked content. + public string Content { get; } + + /// URL of the hyperlink. + public string Url { get; } + + /// Span of the hyperlink in the markdown content. + public ContentSpan Span { get; } + + /// Position of the hyperlink. + public string Source { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentLine.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentLine.Serialization.cs new file mode 100644 index 000000000000..123a6607562e --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentLine.Serialization.cs @@ -0,0 +1,168 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Line in a document, consisting of an contiguous sequence of words. + public partial class DocumentLine : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentLine() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentLine)} does not support writing '{format}' format."); + } + writer.WritePropertyName("content"u8); + writer.WriteStringValue(Content); + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentLine IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentLine JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentLine)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentLine(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentLine DeserializeDocumentLine(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string content = default; + string source = default; + ContentSpan span = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("content"u8)) + { + content = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentLine(content, source, span, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentLine)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentLine IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentLine PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentLine(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentLine)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentLine.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentLine.cs new file mode 100644 index 000000000000..9cfd43583d8a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentLine.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Line in a document, consisting of an contiguous sequence of words. + public partial class DocumentLine + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Line text. + internal DocumentLine(string content) + { + Content = content; + } + + /// Initializes a new instance of . + /// Line text. + /// Encoded source that identifies the position of the line in the content. + /// Span of the line in the markdown content. + /// Keeps track of any properties unknown to the library. + internal DocumentLine(string content, string source, ContentSpan span, IDictionary additionalBinaryDataProperties) + { + Content = content; + Source = source; + Span = span; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Line text. + public string Content { get; } + + /// Encoded source that identifies the position of the line in the content. + public string Source { get; } + + /// Span of the line in the markdown content. 
+ public ContentSpan Span { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentMermaidFigure.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentMermaidFigure.Serialization.cs new file mode 100644 index 000000000000..6141907a79cd --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentMermaidFigure.Serialization.cs @@ -0,0 +1,230 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Figure containing a diagram, such as a flowchart or network diagram. + public partial class DocumentMermaidFigure : DocumentFigure, IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentMermaidFigure() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentMermaidFigure)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("content"u8); + writer.WriteStringValue(Content); + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentMermaidFigure IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (DocumentMermaidFigure)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override DocumentFigure JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentMermaidFigure)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentMermaidFigure(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentMermaidFigure DeserializeDocumentMermaidFigure(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + DocumentFigureKind kind = default; + string id = default; + string source = default; + ContentSpan span = default; + IList elements = default; + DocumentCaption caption = default; + IList footnotes = default; + string description = default; + SemanticRole? 
role = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + string content = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("kind"u8)) + { + kind = new DocumentFigureKind(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("id"u8)) + { + id = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("elements"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + elements = array; + continue; + } + if (prop.NameEquals("caption"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + caption = DocumentCaption.DeserializeDocumentCaption(prop.Value, options); + continue; + } + if (prop.NameEquals("footnotes"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentFootnote.DeserializeDocumentFootnote(item, options)); + } + footnotes = array; + continue; + } + if (prop.NameEquals("description"u8)) + { + description = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("role"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + role = new SemanticRole(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("content"u8)) + { + content = prop.Value.GetString(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentMermaidFigure( + kind, + id, + source, + span, + elements ?? new ChangeTrackingList(), + caption, + footnotes ?? new ChangeTrackingList(), + description, + role, + additionalBinaryDataProperties, + content); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentMermaidFigure)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentMermaidFigure IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (DocumentMermaidFigure)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override DocumentFigure PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentMermaidFigure(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentMermaidFigure)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentMermaidFigure.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentMermaidFigure.cs new file mode 100644 index 000000000000..bda0309c59f4 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentMermaidFigure.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Figure containing a diagram, such as a flowchart or network diagram. + public partial class DocumentMermaidFigure : DocumentFigure + { + /// Initializes a new instance of . + /// Figure identifier. + /// Diagram content represented using [Mermaid syntax](https://mermaid.js.org/intro/). + internal DocumentMermaidFigure(string id, string content) : base(DocumentFigureKind.Mermaid, id) + { + Content = content; + } + + /// Initializes a new instance of . + /// Figure kind. + /// Figure identifier. + /// Encoded source that identifies the position of the figure in the content. + /// Span of the figure in the markdown content. + /// Child elements of the figure, excluding any caption or footnotes. + /// Figure caption. + /// List of figure footnotes. + /// Description of the figure. + /// Semantic role of the figure. + /// Keeps track of any properties unknown to the library. + /// Diagram content represented using [Mermaid syntax](https://mermaid.js.org/intro/). + internal DocumentMermaidFigure(DocumentFigureKind kind, string id, string source, ContentSpan span, IList elements, DocumentCaption caption, IList footnotes, string description, SemanticRole? role, IDictionary additionalBinaryDataProperties, string content) : base(kind, id, source, span, elements, caption, footnotes, description, role, additionalBinaryDataProperties) + { + Content = content; + } + + /// Diagram content represented using [Mermaid syntax](https://mermaid.js.org/intro/). + public string Content { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentPage.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentPage.Serialization.cs new file mode 100644 index 000000000000..9fcfa5cdd83c --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentPage.Serialization.cs @@ -0,0 +1,322 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Content from a document page. 
+ public partial class DocumentPage : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentPage() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentPage)} does not support writing '{format}' format."); + } + writer.WritePropertyName("pageNumber"u8); + writer.WriteNumberValue(PageNumber); + if (Optional.IsDefined(Width)) + { + writer.WritePropertyName("width"u8); + writer.WriteNumberValue(Width.Value); + } + if (Optional.IsDefined(Height)) + { + writer.WritePropertyName("height"u8); + writer.WriteNumberValue(Height.Value); + } + if (Optional.IsCollectionDefined(Spans)) + { + writer.WritePropertyName("spans"u8); + writer.WriteStartArray(); + foreach (ContentSpan item in Spans) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(Angle)) + { + writer.WritePropertyName("angle"u8); + writer.WriteNumberValue(Angle.Value); + } + if (Optional.IsCollectionDefined(Words)) + { + writer.WritePropertyName("words"u8); + writer.WriteStartArray(); + foreach (DocumentWord item in Words) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Lines)) + { + writer.WritePropertyName("lines"u8); + writer.WriteStartArray(); + foreach (DocumentLine item in Lines) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Barcodes)) + { + writer.WritePropertyName("barcodes"u8); + writer.WriteStartArray(); + foreach (DocumentBarcode item in Barcodes) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Formulas)) + { + writer.WritePropertyName("formulas"u8); + writer.WriteStartArray(); + foreach (DocumentFormula item in Formulas) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentPage IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentPage JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentPage)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentPage(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentPage DeserializeDocumentPage(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int pageNumber = default; + float? width = default; + float? height = default; + IList spans = default; + float? angle = default; + IList words = default; + IList lines = default; + IList barcodes = default; + IList formulas = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("pageNumber"u8)) + { + pageNumber = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("width"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + width = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("height"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + height = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("angle"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + angle = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("words"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentWord.DeserializeDocumentWord(item, options)); + } + words = array; + continue; + } + if (prop.NameEquals("lines"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentLine.DeserializeDocumentLine(item, options)); + } + lines = array; + continue; + } + if (prop.NameEquals("barcodes"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentBarcode.DeserializeDocumentBarcode(item, options)); + } + barcodes = array; + continue; + } + if (prop.NameEquals("formulas"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentFormula.DeserializeDocumentFormula(item, options)); + } + formulas = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentPage( + pageNumber, + width, + height, + spans ?? new ChangeTrackingList(), + angle, + words ?? new ChangeTrackingList(), + lines ?? new ChangeTrackingList(), + barcodes ?? new ChangeTrackingList(), + formulas ?? 
new ChangeTrackingList(), + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentPage)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentPage IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentPage PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentPage(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentPage)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentPage.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentPage.cs new file mode 100644 index 000000000000..db2e358ab932 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentPage.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Content from a document page. + public partial class DocumentPage + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Page number (1-based). + internal DocumentPage(int pageNumber) + { + PageNumber = pageNumber; + Spans = new ChangeTrackingList(); + Words = new ChangeTrackingList(); + Lines = new ChangeTrackingList(); + Barcodes = new ChangeTrackingList(); + Formulas = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Page number (1-based). + /// Width of the page. + /// Height of the page. + /// Span(s) associated with the page in the markdown content. + /// + /// The general orientation of the content in clockwise direction, + /// measured in degrees between (-180, 180]. + /// Only if enableOcr is true. + /// + /// List of words in the page. Only if enableOcr and returnDetails are true. + /// List of lines in the page. Only if enableOcr and returnDetails are true. + /// List of barcodes in the page. Only if enableBarcode and returnDetails are true. 
+ /// List of mathematical formulas in the page. Only if enableFormula and returnDetails are true. + /// Keeps track of any properties unknown to the library. + internal DocumentPage(int pageNumber, float? width, float? height, IList spans, float? angle, IList words, IList lines, IList barcodes, IList formulas, IDictionary additionalBinaryDataProperties) + { + PageNumber = pageNumber; + Width = width; + Height = height; + Spans = spans; + Angle = angle; + Words = words; + Lines = lines; + Barcodes = barcodes; + Formulas = formulas; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Page number (1-based). + public int PageNumber { get; } + + /// Width of the page. + public float? Width { get; } + + /// Height of the page. + public float? Height { get; } + + /// Span(s) associated with the page in the markdown content. + public IList Spans { get; } + + /// + /// The general orientation of the content in clockwise direction, + /// measured in degrees between (-180, 180]. + /// Only if enableOcr is true. + /// + public float? Angle { get; } + + /// List of words in the page. Only if enableOcr and returnDetails are true. + public IList Words { get; } + + /// List of lines in the page. Only if enableOcr and returnDetails are true. + public IList Lines { get; } + + /// List of barcodes in the page. Only if enableBarcode and returnDetails are true. + public IList Barcodes { get; } + + /// List of mathematical formulas in the page. Only if enableFormula and returnDetails are true. + public IList Formulas { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentParagraph.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentParagraph.Serialization.cs new file mode 100644 index 000000000000..3c0e1094badc --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentParagraph.Serialization.cs @@ -0,0 +1,186 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Paragraph in a document, generally consisting of an contiguous sequence of lines + /// with common alignment and spacing. + /// + public partial class DocumentParagraph : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentParagraph() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentParagraph)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(Role)) + { + writer.WritePropertyName("role"u8); + writer.WriteStringValue(Role.Value.ToString()); + } + writer.WritePropertyName("content"u8); + writer.WriteStringValue(Content); + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentParagraph IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentParagraph JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentParagraph)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentParagraph(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentParagraph DeserializeDocumentParagraph(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + SemanticRole? role = default; + string content = default; + string source = default; + ContentSpan span = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("role"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + role = new SemanticRole(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("content"u8)) + { + content = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentParagraph(role, content, source, span, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. 
+ protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentParagraph)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentParagraph IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentParagraph PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentParagraph(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentParagraph)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentParagraph.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentParagraph.cs new file mode 100644 index 000000000000..b81705b4e9f2 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentParagraph.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Paragraph in a document, generally consisting of an contiguous sequence of lines + /// with common alignment and spacing. + /// + public partial class DocumentParagraph + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Paragraph text. + internal DocumentParagraph(string content) + { + Content = content; + } + + /// Initializes a new instance of . + /// Semantic role of the paragraph. + /// Paragraph text. + /// Encoded source that identifies the position of the paragraph in the content. + /// Span of the paragraph in the markdown content. + /// Keeps track of any properties unknown to the library. + internal DocumentParagraph(SemanticRole? role, string content, string source, ContentSpan span, IDictionary additionalBinaryDataProperties) + { + Role = role; + Content = content; + Source = source; + Span = span; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Semantic role of the paragraph. + public SemanticRole? Role { get; } + + /// Paragraph text. + public string Content { get; } + + /// Encoded source that identifies the position of the paragraph in the content. + public string Source { get; } + + /// Span of the paragraph in the markdown content. 
+ public ContentSpan Span { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentSection.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentSection.Serialization.cs new file mode 100644 index 000000000000..0ce9fff35776 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentSection.Serialization.cs @@ -0,0 +1,181 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Section in a document. + public partial class DocumentSection : IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentSection)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (Optional.IsCollectionDefined(Elements)) + { + writer.WritePropertyName("elements"u8); + writer.WriteStartArray(); + foreach (string item in Elements) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentSection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentSection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentSection)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentSection(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static DocumentSection DeserializeDocumentSection(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContentSpan span = default; + IList elements = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("elements"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + elements = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentSection(span, elements ?? new ChangeTrackingList(), additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentSection)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentSection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentSection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentSection(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentSection)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentSection.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentSection.cs new file mode 100644 index 000000000000..580c00e0d687 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentSection.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
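Each model in this change implements `IJsonModel<T>` and `IPersistableModel<T>` with the JSON (`"J"`) format, so instances can be round-tripped through `ModelReaderWriter` from System.ClientModel. The following is a minimal sketch of that round trip using `DocumentSection`; the sample payload and the `"/paragraphs/0"`-style element references are illustrative assumptions, not values taken from this change.

```C#
using System;
using System.ClientModel.Primitives;
using Azure.AI.ContentUnderstanding;

class DocumentSectionRoundTrip
{
    static void Main()
    {
        // Illustrative payload: "elements" matches the property name handled by the
        // generated deserializer; the "/paragraphs/0" reference format is an assumption.
        BinaryData json = BinaryData.FromString(
            "{\"elements\":[\"/paragraphs/0\",\"/tables/0\"]}");

        // Read rehydrates the model through IPersistableModel<DocumentSection>.Create,
        // which the generated serialization code implements for the "J" format.
        DocumentSection section = ModelReaderWriter.Read<DocumentSection>(json);

        foreach (string elementRef in section.Elements)
        {
            Console.WriteLine(elementRef);
        }

        // Write goes back out through IJsonModel<DocumentSection>.Write.
        BinaryData roundTripped = ModelReaderWriter.Write(section);
        Console.WriteLine(roundTripped.ToString());
    }
}
```

The same pattern applies to the other models in this PR, since they all share the generated `JsonModelWriteCore`/`PersistableModelCreateCore` plumbing shown above.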
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Section in a document. + public partial class DocumentSection + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + internal DocumentSection() + { + Elements = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Span of the section in the markdown content. + /// Child elements of the section. + /// Keeps track of any properties unknown to the library. + internal DocumentSection(ContentSpan span, IList elements, IDictionary additionalBinaryDataProperties) + { + Span = span; + Elements = elements; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Span of the section in the markdown content. + public ContentSpan Span { get; } + + /// Child elements of the section. + public IList Elements { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTable.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTable.Serialization.cs new file mode 100644 index 000000000000..fcbbe8307050 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTable.Serialization.cs @@ -0,0 +1,258 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Table in a document, consisting table cells arranged in a rectangular layout. + public partial class DocumentTable : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentTable() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentTable)} does not support writing '{format}' format."); + } + writer.WritePropertyName("rowCount"u8); + writer.WriteNumberValue(RowCount); + writer.WritePropertyName("columnCount"u8); + writer.WriteNumberValue(ColumnCount); + writer.WritePropertyName("cells"u8); + writer.WriteStartArray(); + foreach (DocumentTableCell item in Cells) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (Optional.IsDefined(Caption)) + { + writer.WritePropertyName("caption"u8); + writer.WriteObjectValue(Caption, options); + } + if (Optional.IsCollectionDefined(Footnotes)) + { + writer.WritePropertyName("footnotes"u8); + writer.WriteStartArray(); + foreach (DocumentFootnote item in Footnotes) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(Role)) + { + writer.WritePropertyName("role"u8); + writer.WriteStringValue(Role.Value.ToString()); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentTable IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentTable JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentTable)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentTable(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentTable DeserializeDocumentTable(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int rowCount = default; + int columnCount = default; + IList cells = default; + string source = default; + ContentSpan span = default; + DocumentCaption caption = default; + IList footnotes = default; + SemanticRole? 
role = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("rowCount"u8)) + { + rowCount = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("columnCount"u8)) + { + columnCount = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("cells"u8)) + { + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentTableCell.DeserializeDocumentTableCell(item, options)); + } + cells = array; + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("caption"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + caption = DocumentCaption.DeserializeDocumentCaption(prop.Value, options); + continue; + } + if (prop.NameEquals("footnotes"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentFootnote.DeserializeDocumentFootnote(item, options)); + } + footnotes = array; + continue; + } + if (prop.NameEquals("role"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + role = new SemanticRole(prop.Value.GetString()); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentTable( + rowCount, + columnCount, + cells, + source, + span, + caption, + footnotes ?? new ChangeTrackingList(), + role, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentTable)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentTable IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentTable PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentTable(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentTable)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTable.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTable.cs new file mode 100644 index 000000000000..fe1a01e723ad --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTable.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.ContentUnderstanding +{ + /// Table in a document, consisting table cells arranged in a rectangular layout. + public partial class DocumentTable + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Number of rows in the table. + /// Number of columns in the table. + /// Cells contained within the table. + internal DocumentTable(int rowCount, int columnCount, IEnumerable cells) + { + RowCount = rowCount; + ColumnCount = columnCount; + Cells = cells.ToList(); + Footnotes = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Number of rows in the table. + /// Number of columns in the table. + /// Cells contained within the table. + /// Encoded source that identifies the position of the table in the content. + /// Span of the table in the markdown content. + /// Table caption. + /// List of table footnotes. + /// Semantic role of the table. + /// Keeps track of any properties unknown to the library. + internal DocumentTable(int rowCount, int columnCount, IList cells, string source, ContentSpan span, DocumentCaption caption, IList footnotes, SemanticRole? role, IDictionary additionalBinaryDataProperties) + { + RowCount = rowCount; + ColumnCount = columnCount; + Cells = cells; + Source = source; + Span = span; + Caption = caption; + Footnotes = footnotes; + Role = role; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Number of rows in the table. + public int RowCount { get; } + + /// Number of columns in the table. + public int ColumnCount { get; } + + /// Cells contained within the table. + public IList Cells { get; } + + /// Encoded source that identifies the position of the table in the content. + public string Source { get; } + + /// Span of the table in the markdown content. + public ContentSpan Span { get; } + + /// Table caption. + public DocumentCaption Caption { get; } + + /// List of table footnotes. + public IList Footnotes { get; } + + /// Semantic role of the table. + public SemanticRole? Role { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTableCell.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTableCell.Serialization.cs new file mode 100644 index 000000000000..d62075b63b57 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTableCell.Serialization.cs @@ -0,0 +1,276 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
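`DocumentTable` exposes its cells as a flat `Cells` list addressed by `RowIndex`/`ColumnIndex`, with optional `RowSpan`/`ColumnSpan` for merged cells, rather than as a nested row structure. Below is a minimal sketch of projecting a table into a 2-D grid; it assumes a `DocumentTable` obtained from an analyze result, since these output models only expose internal constructors.

```C#
using Azure.AI.ContentUnderstanding;

static class TableGridHelper
{
    // Minimal sketch: project a DocumentTable's flat cell list into a row-by-column grid.
    // Merged cells (RowSpan/ColumnSpan > 1) have their content copied into every
    // grid position they cover.
    public static string[,] ToGrid(DocumentTable table)
    {
        var grid = new string[table.RowCount, table.ColumnCount];

        foreach (DocumentTableCell cell in table.Cells)
        {
            int rowSpan = cell.RowSpan ?? 1;
            int columnSpan = cell.ColumnSpan ?? 1;

            for (int r = cell.RowIndex; r < cell.RowIndex + rowSpan && r < table.RowCount; r++)
            {
                for (int c = cell.ColumnIndex; c < cell.ColumnIndex + columnSpan && c < table.ColumnCount; c++)
                {
                    grid[r, c] = cell.Content;
                }
            }
        }

        return grid;
    }
}
```

Duplicating a merged cell's content into every position it spans keeps downstream indexing simple; callers that need to distinguish spanning cells can inspect `RowSpan`/`ColumnSpan` directly.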
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Table cell in a document table. + public partial class DocumentTableCell : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentTableCell() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentTableCell)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(Kind)) + { + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.Value.ToString()); + } + writer.WritePropertyName("rowIndex"u8); + writer.WriteNumberValue(RowIndex); + writer.WritePropertyName("columnIndex"u8); + writer.WriteNumberValue(ColumnIndex); + if (Optional.IsDefined(RowSpan)) + { + writer.WritePropertyName("rowSpan"u8); + writer.WriteNumberValue(RowSpan.Value); + } + if (Optional.IsDefined(ColumnSpan)) + { + writer.WritePropertyName("columnSpan"u8); + writer.WriteNumberValue(ColumnSpan.Value); + } + writer.WritePropertyName("content"u8); + writer.WriteStringValue(Content); + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (Optional.IsCollectionDefined(Elements)) + { + writer.WritePropertyName("elements"u8); + writer.WriteStartArray(); + foreach (string item in Elements) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentTableCell IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual DocumentTableCell JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentTableCell)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentTableCell(document.RootElement, options); + } + + /// The JSON element to deserialize. 
+ /// The client options for reading and writing models. + internal static DocumentTableCell DeserializeDocumentTableCell(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + DocumentTableCellKind? kind = default; + int rowIndex = default; + int columnIndex = default; + int? rowSpan = default; + int? columnSpan = default; + string content = default; + string source = default; + ContentSpan span = default; + IList elements = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("kind"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + kind = new DocumentTableCellKind(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("rowIndex"u8)) + { + rowIndex = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("columnIndex"u8)) + { + columnIndex = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("rowSpan"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + rowSpan = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("columnSpan"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + columnSpan = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("content"u8)) + { + content = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("elements"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + elements = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentTableCell( + kind, + rowIndex, + columnIndex, + rowSpan, + columnSpan, + content, + source, + span, + elements ?? new ChangeTrackingList(), + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentTableCell)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentTableCell IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. 
+ protected virtual DocumentTableCell PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentTableCell(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentTableCell)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTableCell.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTableCell.cs new file mode 100644 index 000000000000..758f631e6d5b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTableCell.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Table cell in a document table. + public partial class DocumentTableCell + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Row index of the cell. + /// Column index of the cell. + /// Content of the table cell. + internal DocumentTableCell(int rowIndex, int columnIndex, string content) + { + RowIndex = rowIndex; + ColumnIndex = columnIndex; + Content = content; + Elements = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Table cell kind. + /// Row index of the cell. + /// Column index of the cell. + /// Number of rows spanned by this cell. + /// Number of columns spanned by this cell. + /// Content of the table cell. + /// Encoded source that identifies the position of the table cell in the content. + /// Span of the table cell in the markdown content. + /// Child elements of the table cell. + /// Keeps track of any properties unknown to the library. + internal DocumentTableCell(DocumentTableCellKind? kind, int rowIndex, int columnIndex, int? rowSpan, int? columnSpan, string content, string source, ContentSpan span, IList elements, IDictionary additionalBinaryDataProperties) + { + Kind = kind; + RowIndex = rowIndex; + ColumnIndex = columnIndex; + RowSpan = rowSpan; + ColumnSpan = columnSpan; + Content = content; + Source = source; + Span = span; + Elements = elements; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Table cell kind. + public DocumentTableCellKind? Kind { get; } + + /// Row index of the cell. + public int RowIndex { get; } + + /// Column index of the cell. + public int ColumnIndex { get; } + + /// Number of rows spanned by this cell. + public int? RowSpan { get; } + + /// Number of columns spanned by this cell. + public int? ColumnSpan { get; } + + /// Content of the table cell. + public string Content { get; } + + /// Encoded source that identifies the position of the table cell in the content. + public string Source { get; } + + /// Span of the table cell in the markdown content. 
+ public ContentSpan Span { get; } + + /// Child elements of the table cell. + public IList Elements { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTableCellKind.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTableCellKind.cs new file mode 100644 index 000000000000..958e0efc0dde --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentTableCellKind.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Table cell kind. + public readonly partial struct DocumentTableCellKind : IEquatable + { + private readonly string _value; + /// Main content/data. + private const string ContentValue = "content"; + /// Description of the row content. + private const string RowHeaderValue = "rowHeader"; + /// Description the column content. + private const string ColumnHeaderValue = "columnHeader"; + /// Description of the row headers, usually located at the top left corner of a table. + private const string StubHeadValue = "stubHead"; + /// Description of the content in (parts of) the table. + private const string DescriptionValue = "description"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public DocumentTableCellKind(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Main content/data. + public static DocumentTableCellKind Content { get; } = new DocumentTableCellKind(ContentValue); + + /// Description of the row content. + public static DocumentTableCellKind RowHeader { get; } = new DocumentTableCellKind(RowHeaderValue); + + /// Description the column content. + public static DocumentTableCellKind ColumnHeader { get; } = new DocumentTableCellKind(ColumnHeaderValue); + + /// Description of the row headers, usually located at the top left corner of a table. + public static DocumentTableCellKind StubHead { get; } = new DocumentTableCellKind(StubHeadValue); + + /// Description of the content in (parts of) the table. + public static DocumentTableCellKind Description { get; } = new DocumentTableCellKind(DescriptionValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(DocumentTableCellKind left, DocumentTableCellKind right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(DocumentTableCellKind left, DocumentTableCellKind right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator DocumentTableCellKind(string value) => new DocumentTableCellKind(value); + + /// Converts a string to a . + /// The value. + public static implicit operator DocumentTableCellKind?(string value) => value == null ? 
null : new DocumentTableCellKind(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is DocumentTableCellKind other && Equals(other); + + /// + public bool Equals(DocumentTableCellKind other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentWord.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentWord.Serialization.cs new file mode 100644 index 000000000000..37d26046ffb0 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentWord.Serialization.cs @@ -0,0 +1,187 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Word in a document, consisting of a contiguous sequence of characters. + /// For non-space delimited languages, such as Chinese, Japanese, and Korean, + /// each character is represented as its own word. + /// + public partial class DocumentWord : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal DocumentWord() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentWord)} does not support writing '{format}' format."); + } + writer.WritePropertyName("content"u8); + writer.WriteStringValue(Content); + if (Optional.IsDefined(Source)) + { + writer.WritePropertyName("source"u8); + writer.WriteStringValue(Source); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (Optional.IsDefined(Confidence)) + { + writer.WritePropertyName("confidence"u8); + writer.WriteNumberValue(Confidence.Value); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentWord IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. 
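DocumentTableCellKind is an extensible enum: unknown service values round-trip as strings and comparisons are case-insensitive, so callers can match against the known constants without switching over a closed set. A small sketch of distinguishing header cells from data cells; the helper name is illustrative:

```C#
using Azure.AI.ContentUnderstanding;

static class TableCellKindExample
{
    // Header-like cells: row/column headers plus the stub head in the top-left corner.
    // The lifted == operator handles the nullable Kind property (null compares unequal).
    public static bool IsHeaderCell(DocumentTableCell cell) =>
        cell.Kind == DocumentTableCellKind.RowHeader ||
        cell.Kind == DocumentTableCellKind.ColumnHeader ||
        cell.Kind == DocumentTableCellKind.StubHead;
}
```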
+ protected virtual DocumentWord JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentWord)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentWord(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static DocumentWord DeserializeDocumentWord(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string content = default; + string source = default; + ContentSpan span = default; + float? confidence = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("content"u8)) + { + content = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new DocumentWord(content, source, span, confidence, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentWord)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentWord IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual DocumentWord PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentWord(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentWord)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentWord.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentWord.cs new file mode 100644 index 000000000000..e8cf3a268a74 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/DocumentWord.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Word in a document, consisting of a contiguous sequence of characters. + /// For non-space delimited languages, such as Chinese, Japanese, and Korean, + /// each character is represented as its own word. + /// + public partial class DocumentWord + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Word text. + internal DocumentWord(string content) + { + Content = content; + } + + /// Initializes a new instance of . + /// Word text. + /// Encoded source that identifies the position of the word in the content. + /// Span of the word in the markdown content. + /// Confidence of predicting the word. + /// Keeps track of any properties unknown to the library. + internal DocumentWord(string content, string source, ContentSpan span, float? confidence, IDictionary additionalBinaryDataProperties) + { + Content = content; + Source = source; + Span = span; + Confidence = confidence; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Word text. + public string Content { get; } + + /// Encoded source that identifies the position of the word in the content. + public string Source { get; } + + /// Span of the word in the markdown content. + public ContentSpan Span { get; } + + /// Confidence of predicting the word. + public float? Confidence { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/GenerationMethod.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/GenerationMethod.cs new file mode 100644 index 000000000000..8bee38f16089 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/GenerationMethod.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Generation method. + public readonly partial struct GenerationMethod : IEquatable + { + private readonly string _value; + /// Values are generated freely based on the content. + private const string GenerateValue = "generate"; + /// Values are extracted as they appear in the content. + private const string ExtractValue = "extract"; + /// Values are classified against a predefined set of categories. + private const string ClassifyValue = "classify"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public GenerationMethod(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Values are generated freely based on the content. 
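DocumentWord carries an optional per-word Confidence score, so a common post-processing step is to drop or flag low-confidence words. A minimal sketch, assuming the words come from an analysis result; the threshold and helper name are illustrative:

```C#
using System.Collections.Generic;
using System.Linq;
using Azure.AI.ContentUnderstanding;

static class DocumentWordExample
{
    // Keep only words whose confidence is present and at or above the threshold.
    public static IEnumerable<DocumentWord> WithMinimumConfidence(
        IEnumerable<DocumentWord> words, float threshold = 0.8f) =>
        words.Where(word => word.Confidence.HasValue && word.Confidence.Value >= threshold);
}
```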
+ public static GenerationMethod Generate { get; } = new GenerationMethod(GenerateValue); + + /// Values are extracted as they appear in the content. + public static GenerationMethod Extract { get; } = new GenerationMethod(ExtractValue); + + /// Values are classified against a predefined set of categories. + public static GenerationMethod Classify { get; } = new GenerationMethod(ClassifyValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(GenerationMethod left, GenerationMethod right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(GenerationMethod left, GenerationMethod right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator GenerationMethod(string value) => new GenerationMethod(value); + + /// Converts a string to a . + /// The value. + public static implicit operator GenerationMethod?(string value) => value == null ? null : new GenerationMethod(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is GenerationMethod other && Equals(other); + + /// + public bool Equals(GenerationMethod other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/GrantCopyAuthorizationRequest1.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/GrantCopyAuthorizationRequest1.Serialization.cs new file mode 100644 index 000000000000..48fb1796f09a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/GrantCopyAuthorizationRequest1.Serialization.cs @@ -0,0 +1,166 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.ContentUnderstanding +{ + /// The GrantCopyAuthorizationRequest1. + internal partial class GrantCopyAuthorizationRequest1 : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal GrantCopyAuthorizationRequest1() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(GrantCopyAuthorizationRequest1)} does not support writing '{format}' format."); + } + writer.WritePropertyName("targetAzureResourceId"u8); + writer.WriteStringValue(TargetAzureResourceId); + if (Optional.IsDefined(TargetRegion)) + { + writer.WritePropertyName("targetRegion"u8); + writer.WriteStringValue(TargetRegion); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + GrantCopyAuthorizationRequest1 IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual GrantCopyAuthorizationRequest1 JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(GrantCopyAuthorizationRequest1)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeGrantCopyAuthorizationRequest1(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static GrantCopyAuthorizationRequest1 DeserializeGrantCopyAuthorizationRequest1(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string targetAzureResourceId = default; + string targetRegion = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("targetAzureResourceId"u8)) + { + targetAzureResourceId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("targetRegion"u8)) + { + targetRegion = prop.Value.GetString(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new GrantCopyAuthorizationRequest1(targetAzureResourceId, targetRegion, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(GrantCopyAuthorizationRequest1)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. 
+ /// The client options for reading and writing models. + GrantCopyAuthorizationRequest1 IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual GrantCopyAuthorizationRequest1 PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeGrantCopyAuthorizationRequest1(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(GrantCopyAuthorizationRequest1)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// The to serialize into . + public static implicit operator RequestContent(GrantCopyAuthorizationRequest1 grantCopyAuthorizationRequest1) + { + if (grantCopyAuthorizationRequest1 == null) + { + return null; + } + Utf8JsonRequestContent content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(grantCopyAuthorizationRequest1, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/GrantCopyAuthorizationRequest1.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/GrantCopyAuthorizationRequest1.cs new file mode 100644 index 000000000000..d87e5529323a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/GrantCopyAuthorizationRequest1.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// The GrantCopyAuthorizationRequest1. + internal partial class GrantCopyAuthorizationRequest1 + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Azure resource ID of the target analyzer location. + internal GrantCopyAuthorizationRequest1(string targetAzureResourceId) + { + TargetAzureResourceId = targetAzureResourceId; + } + + /// Initializes a new instance of . + /// Azure resource ID of the target analyzer location. + /// Azure region of the target analyzer location. Defaults to current region. + /// Keeps track of any properties unknown to the library. + internal GrantCopyAuthorizationRequest1(string targetAzureResourceId, string targetRegion, IDictionary additionalBinaryDataProperties) + { + TargetAzureResourceId = targetAzureResourceId; + TargetRegion = targetRegion; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Azure resource ID of the target analyzer location. + public string TargetAzureResourceId { get; } + + /// Azure region of the target analyzer location. Defaults to current region. 
+ public string TargetRegion { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/IntegerField.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/IntegerField.Serialization.cs new file mode 100644 index 000000000000..505b94208cc3 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/IntegerField.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Integer field extracted from the content. + public partial class IntegerField : ContentField, IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IntegerField)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(FieldType.ToString()); + if (Optional.IsDefined(ValueInteger)) + { + writer.WritePropertyName("valueInteger"u8); + writer.WriteNumberValue(ValueInteger.Value); + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + IntegerField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (IntegerField)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override ContentField JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IntegerField)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIntegerField(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static IntegerField DeserializeIntegerField(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContentFieldType @type = default; + IList spans = default; + float? confidence = default; + string source = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + ContentFieldType fieldType = default; + long? 
valueInteger = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("type"u8)) + { + @type = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("type"u8)) + { + fieldType = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("valueInteger"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + valueInteger = prop.Value.GetInt64(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new IntegerField( + @type, + spans ?? new ChangeTrackingList(), + confidence, + source, + additionalBinaryDataProperties, + fieldType, + valueInteger); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(IntegerField)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + IntegerField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (IntegerField)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override ContentField PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeIntegerField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IntegerField)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/IntegerField.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/IntegerField.cs new file mode 100644 index 000000000000..d09d5e02d8ac --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/IntegerField.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Integer field extracted from the content. + public partial class IntegerField : ContentField + { + /// Initializes a new instance of . + internal IntegerField() : base(ContentFieldType.Integer) + { + } + + /// Initializes a new instance of . + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Keeps track of any properties unknown to the library. + /// Semantic data type of the field value. + /// Integer field value. + internal IntegerField(ContentFieldType @type, IList spans, float? confidence, string source, IDictionary additionalBinaryDataProperties, ContentFieldType fieldType, long? valueInteger) : base(@type, spans, confidence, source, additionalBinaryDataProperties) + { + FieldType = fieldType; + ValueInteger = valueInteger; + } + + /// Semantic data type of the field value. + internal ContentFieldType FieldType { get; set; } = "integer"; + + /// Integer field value. + public long? ValueInteger { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/JsonField.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/JsonField.Serialization.cs new file mode 100644 index 000000000000..1baca0eff34c --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/JsonField.Serialization.cs @@ -0,0 +1,189 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// JSON field extracted from the content. + public partial class JsonField : ContentField, IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(JsonField)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(FieldType.ToString()); + if (Optional.IsDefined(ValueJson)) + { + writer.WritePropertyName("valueJson"u8); +#if NET6_0_OR_GREATER + writer.WriteRawValue(ValueJson); +#else + using (JsonDocument document = JsonDocument.Parse(ValueJson)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + JsonField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (JsonField)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. 
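IntegerField is one of several ContentField subtypes, and its value is surfaced as a nullable long. When field values come back typed as the polymorphic ContentField base, a type pattern is the usual way to reach the concrete value. A minimal sketch; the helper name is illustrative:

```C#
using Azure.AI.ContentUnderstanding;

static class IntegerFieldExample
{
    // Returns the integer value when the field is an IntegerField, otherwise null.
    public static long? TryGetInteger(ContentField field) =>
        field is IntegerField integerField ? integerField.ValueInteger : null;
}
```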
+ protected override ContentField JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(JsonField)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeJsonField(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static JsonField DeserializeJsonField(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContentFieldType @type = default; + IList spans = default; + float? confidence = default; + string source = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + ContentFieldType fieldType = default; + BinaryData valueJson = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("type"u8)) + { + @type = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("type"u8)) + { + fieldType = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("valueJson"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + valueJson = BinaryData.FromString(prop.Value.GetRawText()); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new JsonField( + @type, + spans ?? new ChangeTrackingList(), + confidence, + source, + additionalBinaryDataProperties, + fieldType, + valueJson); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(JsonField)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + JsonField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (JsonField)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override ContentField PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeJsonField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(JsonField)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/JsonField.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/JsonField.cs new file mode 100644 index 000000000000..a6c17c4867c6 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/JsonField.cs @@ -0,0 +1,67 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// JSON field extracted from the content. + public partial class JsonField : ContentField + { + /// Initializes a new instance of . + internal JsonField() : base(ContentFieldType.Json) + { + } + + /// Initializes a new instance of . + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Keeps track of any properties unknown to the library. + /// Semantic data type of the field value. + /// JSON field value. + internal JsonField(ContentFieldType @type, IList spans, float? confidence, string source, IDictionary additionalBinaryDataProperties, ContentFieldType fieldType, BinaryData valueJson) : base(@type, spans, confidence, source, additionalBinaryDataProperties) + { + FieldType = fieldType; + ValueJson = valueJson; + } + + /// Semantic data type of the field value. + internal ContentFieldType FieldType { get; set; } = "json"; + + /// + /// JSON field value. + /// To assign an object to this property use . + /// To assign an already formatted json string to this property use . + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo"). + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\""). + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }). + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}"). + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public BinaryData ValueJson { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/KnowledgeSource.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/KnowledgeSource.Serialization.cs new file mode 100644 index 000000000000..d35de915ff29 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/KnowledgeSource.Serialization.cs @@ -0,0 +1,139 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
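JsonField.ValueJson is raw JSON wrapped in BinaryData, the same representation the serializer above writes and reads. One way to consume it is to parse it with System.Text.Json, mirroring how the generated serialization code itself handles BinaryData values; the helper name is illustrative:

```C#
using System.Text.Json;
using Azure.AI.ContentUnderstanding;

static class JsonFieldExample
{
    // Parses ValueJson into a detached JsonElement; returns null when the field
    // is not a JsonField or carries no value.
    public static JsonElement? TryGetJson(ContentField field)
    {
        if (field is JsonField jsonField && jsonField.ValueJson != null)
        {
            using JsonDocument document = JsonDocument.Parse(jsonField.ValueJson);
            return document.RootElement.Clone();
        }
        return null;
    }
}
```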
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Knowledge source. + /// Please note this is the abstract base class. The derived classes available for instantiation are: . + /// + [PersistableModelProxy(typeof(UnknownKnowledgeSource))] + public abstract partial class KnowledgeSource : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal KnowledgeSource() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(KnowledgeSource)} does not support writing '{format}' format."); + } + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.ToString()); + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + KnowledgeSource IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual KnowledgeSource JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(KnowledgeSource)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeKnowledgeSource(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static KnowledgeSource DeserializeKnowledgeSource(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("kind"u8, out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "labeledData": + return LabeledDataKnowledgeSource.DeserializeLabeledDataKnowledgeSource(element, options); + } + } + return UnknownKnowledgeSource.DeserializeUnknownKnowledgeSource(element, options); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(KnowledgeSource)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + KnowledgeSource IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual KnowledgeSource PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeKnowledgeSource(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(KnowledgeSource)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/KnowledgeSource.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/KnowledgeSource.cs new file mode 100644 index 000000000000..6dcfcf248d8a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/KnowledgeSource.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Knowledge source. + /// Please note this is the abstract base class. The derived classes available for instantiation are: . + /// + public abstract partial class KnowledgeSource + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// The kind of knowledge source. + private protected KnowledgeSource(KnowledgeSourceKind kind) + { + Kind = kind; + } + + /// Initializes a new instance of . + /// The kind of knowledge source. + /// Keeps track of any properties unknown to the library. + internal KnowledgeSource(KnowledgeSourceKind kind, IDictionary additionalBinaryDataProperties) + { + Kind = kind; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// The kind of knowledge source. + internal KnowledgeSourceKind Kind { get; set; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/KnowledgeSourceKind.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/KnowledgeSourceKind.cs new file mode 100644 index 000000000000..e7ff261adbf3 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/KnowledgeSourceKind.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Knowledge source kind. + internal readonly partial struct KnowledgeSourceKind : IEquatable + { + private readonly string _value; + /// A labeled data knowledge source. + private const string LabeledDataValue = "labeledData"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public KnowledgeSourceKind(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// A labeled data knowledge source. + public static KnowledgeSourceKind LabeledData { get; } = new KnowledgeSourceKind(LabeledDataValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(KnowledgeSourceKind left, KnowledgeSourceKind right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(KnowledgeSourceKind left, KnowledgeSourceKind right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator KnowledgeSourceKind(string value) => new KnowledgeSourceKind(value); + + /// Converts a string to a . + /// The value. + public static implicit operator KnowledgeSourceKind?(string value) => value == null ? null : new KnowledgeSourceKind(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is KnowledgeSourceKind other && Equals(other); + + /// + public bool Equals(KnowledgeSourceKind other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/LabeledDataKnowledgeSource.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/LabeledDataKnowledgeSource.Serialization.cs new file mode 100644 index 000000000000..4779ccfb0379 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/LabeledDataKnowledgeSource.Serialization.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Labeled data knowledge source. + public partial class LabeledDataKnowledgeSource : KnowledgeSource, IJsonModel + { + /// Initializes a new instance of for deserialization. + internal LabeledDataKnowledgeSource() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LabeledDataKnowledgeSource)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("containerUrl"u8); + writer.WriteStringValue(ContainerUrl.AbsoluteUri); + if (Optional.IsDefined(Prefix)) + { + writer.WritePropertyName("prefix"u8); + writer.WriteStringValue(Prefix); + } + writer.WritePropertyName("fileListPath"u8); + writer.WriteStringValue(FileListPath); + } + + /// The JSON reader. + /// The client options for reading and writing models. + LabeledDataKnowledgeSource IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (LabeledDataKnowledgeSource)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override KnowledgeSource JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LabeledDataKnowledgeSource)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLabeledDataKnowledgeSource(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static LabeledDataKnowledgeSource DeserializeLabeledDataKnowledgeSource(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + KnowledgeSourceKind kind = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + Uri containerUrl = default; + string prefix = default; + string fileListPath = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("kind"u8)) + { + kind = new KnowledgeSourceKind(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("containerUrl"u8)) + { + containerUrl = new Uri(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("prefix"u8)) + { + prefix = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("fileListPath"u8)) + { + fileListPath = prop.Value.GetString(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new LabeledDataKnowledgeSource(kind, additionalBinaryDataProperties, containerUrl, prefix, fileListPath); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(LabeledDataKnowledgeSource)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. 
+ LabeledDataKnowledgeSource IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (LabeledDataKnowledgeSource)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override KnowledgeSource PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeLabeledDataKnowledgeSource(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LabeledDataKnowledgeSource)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/LabeledDataKnowledgeSource.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/LabeledDataKnowledgeSource.cs new file mode 100644 index 000000000000..1c5eebbf884a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/LabeledDataKnowledgeSource.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Labeled data knowledge source. + public partial class LabeledDataKnowledgeSource : KnowledgeSource + { + /// Initializes a new instance of . + /// The URL of the blob container containing labeled data. + /// An optional path to a file listing specific blobs to include. + /// or is null. + public LabeledDataKnowledgeSource(Uri containerUrl, string fileListPath) : base(KnowledgeSourceKind.LabeledData) + { + Argument.AssertNotNull(containerUrl, nameof(containerUrl)); + Argument.AssertNotNull(fileListPath, nameof(fileListPath)); + + ContainerUrl = containerUrl; + FileListPath = fileListPath; + } + + /// Initializes a new instance of . + /// The kind of knowledge source. + /// Keeps track of any properties unknown to the library. + /// The URL of the blob container containing labeled data. + /// An optional prefix to filter blobs within the container. + /// An optional path to a file listing specific blobs to include. + internal LabeledDataKnowledgeSource(KnowledgeSourceKind kind, IDictionary additionalBinaryDataProperties, Uri containerUrl, string prefix, string fileListPath) : base(kind, additionalBinaryDataProperties) + { + ContainerUrl = containerUrl; + Prefix = prefix; + FileListPath = fileListPath; + } + + /// The URL of the blob container containing labeled data. + public Uri ContainerUrl { get; set; } + + /// An optional prefix to filter blobs within the container. + public string Prefix { get; set; } + + /// An optional path to a file listing specific blobs to include. 
+ public string FileListPath { get; set; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/LengthUnit.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/LengthUnit.cs new file mode 100644 index 000000000000..8e529acbd36d --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/LengthUnit.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Length unit used by the width, height, and source properties. + public readonly partial struct LengthUnit : IEquatable + { + private readonly string _value; + /// Pixel unit. + private const string PixelValue = "pixel"; + /// Inch unit. + private const string InchValue = "inch"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public LengthUnit(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Pixel unit. + public static LengthUnit Pixel { get; } = new LengthUnit(PixelValue); + + /// Inch unit. + public static LengthUnit Inch { get; } = new LengthUnit(InchValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(LengthUnit left, LengthUnit right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(LengthUnit left, LengthUnit right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator LengthUnit(string value) => new LengthUnit(value); + + /// Converts a string to a . + /// The value. + public static implicit operator LengthUnit?(string value) => value == null ? null : new LengthUnit(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is LengthUnit other && Equals(other); + + /// + public bool Equals(LengthUnit other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/MediaContent.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/MediaContent.Serialization.cs new file mode 100644 index 000000000000..fc42de8a5d72 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/MediaContent.Serialization.cs @@ -0,0 +1,174 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Media content base class. + /// Please note this is the abstract base class. The derived classes available for instantiation are: and . + /// + [PersistableModelProxy(typeof(UnknownMediaContent))] + public abstract partial class MediaContent : IJsonModel + { + /// Initializes a new instance of for deserialization. 
+ internal MediaContent() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MediaContent)} does not support writing '{format}' format."); + } + writer.WritePropertyName("kind"u8); + writer.WriteStringValue(Kind.ToString()); + writer.WritePropertyName("mimeType"u8); + writer.WriteStringValue(MimeType); + if (Optional.IsDefined(AnalyzerId)) + { + writer.WritePropertyName("analyzerId"u8); + writer.WriteStringValue(AnalyzerId); + } + if (Optional.IsDefined(Category)) + { + writer.WritePropertyName("category"u8); + writer.WriteStringValue(Category); + } + if (Optional.IsDefined(Path)) + { + writer.WritePropertyName("path"u8); + writer.WriteStringValue(Path); + } + if (Optional.IsDefined(Markdown)) + { + writer.WritePropertyName("markdown"u8); + writer.WriteStringValue(Markdown); + } + if (Optional.IsCollectionDefined(Fields)) + { + writer.WritePropertyName("fields"u8); + writer.WriteStartObject(); + foreach (var item in Fields) + { + writer.WritePropertyName(item.Key); + writer.WriteObjectValue(item.Value, options); + } + writer.WriteEndObject(); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + MediaContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual MediaContent JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MediaContent)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeMediaContent(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static MediaContent DeserializeMediaContent(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + if (element.TryGetProperty("kind"u8, out JsonElement discriminator)) + { + switch (discriminator.GetString()) + { + case "document": + return DocumentContent.DeserializeDocumentContent(element, options); + case "audioVisual": + return AudioVisualContent.DeserializeAudioVisualContent(element, options); + } + } + return UnknownMediaContent.DeserializeUnknownMediaContent(element, options); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(MediaContent)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + MediaContent IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual MediaContent PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeMediaContent(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(MediaContent)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/MediaContent.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/MediaContent.cs new file mode 100644 index 000000000000..efbe680527b9 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/MediaContent.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Media content base class. + /// Please note this is the abstract base class. The derived classes available for instantiation are: and . + /// + public abstract partial class MediaContent + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Content kind. + /// Detected MIME type of the content. Ex. application/pdf, image/jpeg, etc. 
+ private protected MediaContent(MediaContentKind kind, string mimeType) + { + Kind = kind; + MimeType = mimeType; + Fields = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// Content kind. + /// Detected MIME type of the content. Ex. application/pdf, image/jpeg, etc. + /// The analyzer that generated this content. + /// Classified content category. + /// The path of the content in the input. + /// Markdown representation of the content. + /// Extracted fields from the content. + /// Keeps track of any properties unknown to the library. + internal MediaContent(MediaContentKind kind, string mimeType, string analyzerId, string category, string path, string markdown, IDictionary fields, IDictionary additionalBinaryDataProperties) + { + Kind = kind; + MimeType = mimeType; + AnalyzerId = analyzerId; + Category = category; + Path = path; + Markdown = markdown; + Fields = fields; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Content kind. + internal MediaContentKind Kind { get; set; } + + /// Detected MIME type of the content. Ex. application/pdf, image/jpeg, etc. + public string MimeType { get; } + + /// The analyzer that generated this content. + public string AnalyzerId { get; } + + /// Classified content category. + public string Category { get; } + + /// The path of the content in the input. + public string Path { get; } + + /// Markdown representation of the content. + public string Markdown { get; } + + /// Extracted fields from the content. + public IDictionary Fields { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/MediaContentKind.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/MediaContentKind.cs new file mode 100644 index 000000000000..db90270df5f6 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/MediaContentKind.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Kind of media content. + internal readonly partial struct MediaContentKind : IEquatable + { + private readonly string _value; + /// Document content, such as pdf, image, txt, etc. + private const string DocumentValue = "document"; + /// Audio visual content, such as mp3, mp4, etc. + private const string AudioVisualValue = "audioVisual"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public MediaContentKind(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Document content, such as pdf, image, txt, etc. + public static MediaContentKind Document { get; } = new MediaContentKind(DocumentValue); + + /// Audio visual content, such as mp3, mp4, etc. + public static MediaContentKind AudioVisual { get; } = new MediaContentKind(AudioVisualValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(MediaContentKind left, MediaContentKind right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(MediaContentKind left, MediaContentKind right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. 
+ public static implicit operator MediaContentKind(string value) => new MediaContentKind(value); + + /// Converts a string to a . + /// The value. + public static implicit operator MediaContentKind?(string value) => value == null ? null : new MediaContentKind(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is MediaContentKind other && Equals(other); + + /// + public bool Equals(MediaContentKind other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/NumberField.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/NumberField.Serialization.cs new file mode 100644 index 000000000000..2f4eacea729a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/NumberField.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Number field extracted from the content. + public partial class NumberField : ContentField, IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(NumberField)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(FieldType.ToString()); + if (Optional.IsDefined(ValueNumber)) + { + writer.WritePropertyName("valueNumber"u8); + writer.WriteNumberValue(ValueNumber.Value); + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + NumberField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (NumberField)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override ContentField JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(NumberField)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeNumberField(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static NumberField DeserializeNumberField(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContentFieldType @type = default; + IList spans = default; + float? confidence = default; + string source = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + ContentFieldType fieldType = default; + double? valueNumber = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("type"u8)) + { + @type = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("type"u8)) + { + fieldType = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("valueNumber"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + valueNumber = prop.Value.GetDouble(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new NumberField( + @type, + spans ?? new ChangeTrackingList(), + confidence, + source, + additionalBinaryDataProperties, + fieldType, + valueNumber); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(NumberField)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + NumberField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (NumberField)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override ContentField PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeNumberField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(NumberField)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/NumberField.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/NumberField.cs new file mode 100644 index 000000000000..825192f21ad1 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/NumberField.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Number field extracted from the content. + public partial class NumberField : ContentField + { + /// Initializes a new instance of . + internal NumberField() : base(ContentFieldType.Number) + { + } + + /// Initializes a new instance of . + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Keeps track of any properties unknown to the library. + /// Semantic data type of the field value. + /// Number field value. + internal NumberField(ContentFieldType @type, IList spans, float? confidence, string source, IDictionary additionalBinaryDataProperties, ContentFieldType fieldType, double? valueNumber) : base(@type, spans, confidence, source, additionalBinaryDataProperties) + { + FieldType = fieldType; + ValueNumber = valueNumber; + } + + /// Semantic data type of the field value. + internal ContentFieldType FieldType { get; set; } = "number"; + + /// Number field value. + public double? ValueNumber { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ObjectField.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ObjectField.Serialization.cs new file mode 100644 index 000000000000..387ace435551 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ObjectField.Serialization.cs @@ -0,0 +1,193 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Object field extracted from the content. + public partial class ObjectField : ContentField, IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ObjectField)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(FieldType.ToString()); + if (Optional.IsCollectionDefined(ValueObject)) + { + writer.WritePropertyName("valueObject"u8); + writer.WriteStartObject(); + foreach (var item in ValueObject) + { + writer.WritePropertyName(item.Key); + writer.WriteObjectValue(item.Value, options); + } + writer.WriteEndObject(); + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + ObjectField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (ObjectField)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override ContentField JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ObjectField)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeObjectField(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static ObjectField DeserializeObjectField(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContentFieldType @type = default; + IList spans = default; + float? confidence = default; + string source = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + ContentFieldType fieldType = default; + IDictionary valueObject = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("type"u8)) + { + @type = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("type"u8)) + { + fieldType = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("valueObject"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + dictionary.Add(prop0.Name, DeserializeContentField(prop0.Value, options)); + } + valueObject = dictionary; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new ObjectField( + @type, + spans ?? new ChangeTrackingList(), + confidence, + source, + additionalBinaryDataProperties, + fieldType, + valueObject ?? 
new ChangeTrackingDictionary()); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ObjectField)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ObjectField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (ObjectField)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override ContentField PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeObjectField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ObjectField)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ObjectField.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ObjectField.cs new file mode 100644 index 000000000000..5aa7524d5e98 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ObjectField.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Object field extracted from the content. + public partial class ObjectField : ContentField + { + /// Initializes a new instance of . + internal ObjectField() : base(ContentFieldType.Object) + { + ValueObject = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Keeps track of any properties unknown to the library. + /// Semantic data type of the field value. + /// Object field value. + internal ObjectField(ContentFieldType @type, IList spans, float? confidence, string source, IDictionary additionalBinaryDataProperties, ContentFieldType fieldType, IDictionary valueObject) : base(@type, spans, confidence, source, additionalBinaryDataProperties) + { + FieldType = fieldType; + ValueObject = valueObject; + } + + /// Semantic data type of the field value. + internal ContentFieldType FieldType { get; set; } = "object"; + + /// Object field value. 
+ public IDictionary ValueObject { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/OperationState.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/OperationState.cs new file mode 100644 index 000000000000..ac4e2cd05b8b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/OperationState.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Enum describing allowed operation states. + internal readonly partial struct OperationState : IEquatable + { + private readonly string _value; + /// The operation has not started. + private const string NotStartedValue = "NotStarted"; + /// The operation is in progress. + private const string RunningValue = "Running"; + /// The operation has completed successfully. + private const string SucceededValue = "Succeeded"; + /// The operation has failed. + private const string FailedValue = "Failed"; + /// The operation has been canceled by the user. + private const string CanceledValue = "Canceled"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public OperationState(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// The operation has not started. + public static OperationState NotStarted { get; } = new OperationState(NotStartedValue); + + /// The operation is in progress. + public static OperationState Running { get; } = new OperationState(RunningValue); + + /// The operation has completed successfully. + public static OperationState Succeeded { get; } = new OperationState(SucceededValue); + + /// The operation has failed. + public static OperationState Failed { get; } = new OperationState(FailedValue); + + /// The operation has been canceled by the user. + public static OperationState Canceled { get; } = new OperationState(CanceledValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(OperationState left, OperationState right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(OperationState left, OperationState right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator OperationState(string value) => new OperationState(value); + + /// Converts a string to a . + /// The value. + public static implicit operator OperationState?(string value) => value == null ? null : new OperationState(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is OperationState other && Equals(other); + + /// + public bool Equals(OperationState other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/PagedContentAnalyzer.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/PagedContentAnalyzer.Serialization.cs new file mode 100644 index 000000000000..9e7fd3cdffc2 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/PagedContentAnalyzer.Serialization.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure; + +namespace Azure.AI.ContentUnderstanding +{ + /// Paged collection of ContentAnalyzer items. + internal partial class PagedContentAnalyzer : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal PagedContentAnalyzer() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PagedContentAnalyzer)} does not support writing '{format}' format."); + } + writer.WritePropertyName("value"u8); + writer.WriteStartArray(); + foreach (ContentAnalyzer item in Value) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (Optional.IsDefined(NextLink)) + { + writer.WritePropertyName("nextLink"u8); + writer.WriteStringValue(NextLink.AbsoluteUri); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + PagedContentAnalyzer IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual PagedContentAnalyzer JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PagedContentAnalyzer)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePagedContentAnalyzer(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static PagedContentAnalyzer DeserializePagedContentAnalyzer(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList value = default; + Uri nextLink = default; + Guid? clientRequestId = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("value"u8)) + { + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentAnalyzer.DeserializeContentAnalyzer(item, options)); + } + value = array; + continue; + } + if (prop.NameEquals("nextLink"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + nextLink = new Uri(prop.Value.GetString()); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new PagedContentAnalyzer(value, nextLink, clientRequestId, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(PagedContentAnalyzer)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + PagedContentAnalyzer IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual PagedContentAnalyzer PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializePagedContentAnalyzer(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PagedContentAnalyzer)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// The to deserialize the from. 
+ public static explicit operator PagedContentAnalyzer(Response response) + { + using JsonDocument document = JsonDocument.Parse(response.Content, ModelSerializationExtensions.JsonDocumentOptions); + return DeserializePagedContentAnalyzer(document.RootElement, ModelSerializationExtensions.WireOptions); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/PagedContentAnalyzer.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/PagedContentAnalyzer.cs new file mode 100644 index 000000000000..31b30a5af5fc --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/PagedContentAnalyzer.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.ContentUnderstanding +{ + /// Paged collection of ContentAnalyzer items. + internal partial class PagedContentAnalyzer + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// The ContentAnalyzer items on this page. + internal PagedContentAnalyzer(IEnumerable value) + { + Value = value.ToList(); + } + + /// Initializes a new instance of . + /// The ContentAnalyzer items on this page. + /// The link to the next page of items. + /// An opaque, globally-unique, client-generated string identifier for the request. + /// Keeps track of any properties unknown to the library. + internal PagedContentAnalyzer(IList value, Uri nextLink, Guid? clientRequestId, IDictionary additionalBinaryDataProperties) + { + Value = value; + NextLink = nextLink; + ClientRequestId = clientRequestId; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// The ContentAnalyzer items on this page. + public IList Value { get; } + + /// The link to the next page of items. + public Uri NextLink { get; } + + /// An opaque, globally-unique, client-generated string identifier for the request. + public Guid? ClientRequestId { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ProcessingLocation.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ProcessingLocation.cs new file mode 100644 index 000000000000..57610429cc6a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/ProcessingLocation.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// The location where the data may be processed. + public readonly partial struct ProcessingLocation : IEquatable + { + private readonly string _value; + /// Data may be processed in the same geography as the resource. + private const string GeographyValue = "geography"; + /// Data may be processed in the same data zone as the resource. + private const string DataZoneValue = "dataZone"; + /// Data may be processed in any Azure data center globally. + private const string GlobalValue = "global"; + + /// Initializes a new instance of . + /// The value. + /// is null. 
+ public ProcessingLocation(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Data may be processed in the same geography as the resource. + public static ProcessingLocation Geography { get; } = new ProcessingLocation(GeographyValue); + + /// Data may be processed in the same data zone as the resource. + public static ProcessingLocation DataZone { get; } = new ProcessingLocation(DataZoneValue); + + /// Data may be processed in any Azure data center globally. + public static ProcessingLocation Global { get; } = new ProcessingLocation(GlobalValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(ProcessingLocation left, ProcessingLocation right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(ProcessingLocation left, ProcessingLocation right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator ProcessingLocation(string value) => new ProcessingLocation(value); + + /// Converts a string to a . + /// The value. + public static implicit operator ProcessingLocation?(string value) => value == null ? null : new ProcessingLocation(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ProcessingLocation other && Equals(other); + + /// + public bool Equals(ProcessingLocation other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/SemanticRole.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/SemanticRole.cs new file mode 100644 index 000000000000..24856a1cdb75 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/SemanticRole.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Semantic role of the paragraph. + public readonly partial struct SemanticRole : IEquatable + { + private readonly string _value; + /// Text near the top edge of the page. + private const string PageHeaderValue = "pageHeader"; + /// Text near the bottom edge of the page. + private const string PageFooterValue = "pageFooter"; + /// Page number. + private const string PageNumberValue = "pageNumber"; + /// Top-level title describing the entire document. + private const string TitleValue = "title"; + /// Sub heading describing a section of the document. + private const string SectionHeadingValue = "sectionHeading"; + /// Note usually placed after the main content on a page. + private const string FootnoteValue = "footnote"; + /// Block of formulas, often with shared alignment. + private const string FormulaBlockValue = "formulaBlock"; + + /// Initializes a new instance of . + /// The value. + /// is null. 
+ public SemanticRole(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Text near the top edge of the page. + public static SemanticRole PageHeader { get; } = new SemanticRole(PageHeaderValue); + + /// Text near the bottom edge of the page. + public static SemanticRole PageFooter { get; } = new SemanticRole(PageFooterValue); + + /// Page number. + public static SemanticRole PageNumber { get; } = new SemanticRole(PageNumberValue); + + /// Top-level title describing the entire document. + public static SemanticRole Title { get; } = new SemanticRole(TitleValue); + + /// Sub heading describing a section of the document. + public static SemanticRole SectionHeading { get; } = new SemanticRole(SectionHeadingValue); + + /// Note usually placed after the main content on a page. + public static SemanticRole Footnote { get; } = new SemanticRole(FootnoteValue); + + /// Block of formulas, often with shared alignment. + public static SemanticRole FormulaBlock { get; } = new SemanticRole(FormulaBlockValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(SemanticRole left, SemanticRole right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(SemanticRole left, SemanticRole right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator SemanticRole(string value) => new SemanticRole(value); + + /// Converts a string to a . + /// The value. + public static implicit operator SemanticRole?(string value) => value == null ? null : new SemanticRole(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is SemanticRole other && Equals(other); + + /// + public bool Equals(SemanticRole other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/StringField.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/StringField.Serialization.cs new file mode 100644 index 000000000000..891929ca067f --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/StringField.Serialization.cs @@ -0,0 +1,178 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// String field extracted from the content. + public partial class StringField : ContentField, IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. 
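The ProcessingLocation and SemanticRole types above (and TableFormat below) follow the SDK's extensible-enum pattern: a string-backed struct with static properties for the known values, an implicit conversion from string, and case-insensitive equality, so values the service adds later still round-trip. A minimal sketch of that behavior; the variable names are illustrative only:

```csharp
using System;
using Azure.AI.ContentUnderstanding;

// Known values are exposed as static properties.
SemanticRole role = SemanticRole.SectionHeading;

// Any string converts implicitly, so newer service-side values still work.
SemanticRole fromWire = "SECTIONHEADING";

// Equality is case-insensitive; ToString returns the underlying string.
Console.WriteLine(role == fromWire);                        // True
Console.WriteLine(ProcessingLocation.DataZone.ToString());  // dataZone
```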
+ protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(StringField)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(FieldType.ToString()); + if (Optional.IsDefined(ValueString)) + { + writer.WritePropertyName("valueString"u8); + writer.WriteStringValue(ValueString); + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + StringField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (StringField)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override ContentField JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(StringField)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeStringField(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static StringField DeserializeStringField(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContentFieldType @type = default; + IList spans = default; + float? confidence = default; + string source = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + ContentFieldType fieldType = default; + string valueString = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("type"u8)) + { + @type = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("type"u8)) + { + fieldType = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("valueString"u8)) + { + valueString = prop.Value.GetString(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new StringField( + @type, + spans ?? new ChangeTrackingList(), + confidence, + source, + additionalBinaryDataProperties, + fieldType, + valueString); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. 
+ protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(StringField)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + StringField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (StringField)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override ContentField PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeStringField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(StringField)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/StringField.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/StringField.cs new file mode 100644 index 000000000000..8fbed69a60cc --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/StringField.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// String field extracted from the content. + public partial class StringField : ContentField + { + /// Initializes a new instance of . + internal StringField() : base(ContentFieldType.String) + { + } + + /// Initializes a new instance of . + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Keeps track of any properties unknown to the library. + /// Semantic data type of the field value. + /// String field value. + internal StringField(ContentFieldType @type, IList spans, float? confidence, string source, IDictionary additionalBinaryDataProperties, ContentFieldType fieldType, string valueString) : base(@type, spans, confidence, source, additionalBinaryDataProperties) + { + FieldType = fieldType; + ValueString = valueString; + } + + /// Semantic data type of the field value. + internal ContentFieldType FieldType { get; set; } = "string"; + + /// String field value. 
+ public string ValueString { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/SupportedModels.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/SupportedModels.Serialization.cs new file mode 100644 index 000000000000..06b617e10a0c --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/SupportedModels.Serialization.cs @@ -0,0 +1,194 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Chat completion and embedding models supported by the analyzer. + public partial class SupportedModels : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal SupportedModels() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SupportedModels)} does not support writing '{format}' format."); + } + writer.WritePropertyName("completion"u8); + writer.WriteStartArray(); + foreach (string item in Completion) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + writer.WritePropertyName("embedding"u8); + writer.WriteStartArray(); + foreach (string item in Embedding) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + SupportedModels IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual SupportedModels JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(SupportedModels)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeSupportedModels(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
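StringField is one of several ContentField subtypes, each exposing its typed value through a dedicated property (ValueString here, ValueTime on the TimeField defined further down). A hedged consumption sketch; the PrintField helper and the idea that the field came from an analysis result are illustrative assumptions:

```csharp
using System;
using Azure.AI.ContentUnderstanding;

public static class FieldSamples
{
    // Pattern-match the base ContentField to reach the strongly typed value.
    public static void PrintField(ContentField field)
    {
        if (field is StringField stringField)
        {
            Console.WriteLine($"string value: {stringField.ValueString}");
        }
        else if (field is TimeField timeField)
        {
            Console.WriteLine($"time value: {timeField.ValueTime}");
        }
        else
        {
            Console.WriteLine("another field kind");
        }
    }
}
```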
+ internal static SupportedModels DeserializeSupportedModels(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList completion = default; + IList embedding = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("completion"u8)) + { + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + completion = array; + continue; + } + if (prop.NameEquals("embedding"u8)) + { + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + embedding = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new SupportedModels(completion, embedding, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(SupportedModels)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + SupportedModels IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual SupportedModels PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeSupportedModels(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(SupportedModels)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/SupportedModels.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/SupportedModels.cs new file mode 100644 index 000000000000..c44d5ddddc95 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/SupportedModels.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.ContentUnderstanding +{ + /// Chat completion and embedding models supported by the analyzer. + public partial class SupportedModels + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Chat completion models supported by the analyzer. + /// Embedding models supported by the analyzer. + internal SupportedModels(IEnumerable completion, IEnumerable embedding) + { + Completion = completion.ToList(); + Embedding = embedding.ToList(); + } + + /// Initializes a new instance of . + /// Chat completion models supported by the analyzer. + /// Embedding models supported by the analyzer. + /// Keeps track of any properties unknown to the library. + internal SupportedModels(IList completion, IList embedding, IDictionary additionalBinaryDataProperties) + { + Completion = completion; + Embedding = embedding; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Chat completion models supported by the analyzer. + public IList Completion { get; } + + /// Embedding models supported by the analyzer. + public IList Embedding { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TableFormat.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TableFormat.cs new file mode 100644 index 000000000000..0b70f68ba47b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TableFormat.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.ContentUnderstanding +{ + /// Representation format of tables in analyze result markdown. + public readonly partial struct TableFormat : IEquatable + { + private readonly string _value; + /// Represent tables using HTML table elements: \<table>, \<th>, \<tr>, \<td>. + private const string HtmlValue = "html"; + /// Represent tables using GitHub Flavored Markdown table syntax, which does not support merged cells or rich headers. + private const string MarkdownValue = "markdown"; + + /// Initializes a new instance of . + /// The value. + /// is null. + public TableFormat(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + /// Represent tables using HTML table elements: \<table>, \<th>, \<tr>, \<td>. + public static TableFormat Html { get; } = new TableFormat(HtmlValue); + + /// Represent tables using GitHub Flavored Markdown table syntax, which does not support merged cells or rich headers. + public static TableFormat Markdown { get; } = new TableFormat(MarkdownValue); + + /// Determines if two values are the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator ==(TableFormat left, TableFormat right) => left.Equals(right); + + /// Determines if two values are not the same. + /// The left value to compare. + /// The right value to compare. + public static bool operator !=(TableFormat left, TableFormat right) => !left.Equals(right); + + /// Converts a string to a . + /// The value. + public static implicit operator TableFormat(string value) => new TableFormat(value); + + /// Converts a string to a . + /// The value. 
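SupportedModels is simply two lists of model names, one for chat completion and one for embeddings. A small sketch, assuming a SupportedModels instance was obtained from an analyzer returned by the service; the Print helper is hypothetical:

```csharp
using System;
using Azure.AI.ContentUnderstanding;

public static class ModelListSamples
{
    public static void Print(SupportedModels supportedModels)
    {
        // Plain lists of model names, e.g. "gpt-4.1" or "text-embedding-3-large".
        Console.WriteLine("Completion: " + string.Join(", ", supportedModels.Completion));
        Console.WriteLine("Embedding:  " + string.Join(", ", supportedModels.Embedding));
    }
}
```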
+ public static implicit operator TableFormat?(string value) => value == null ? null : new TableFormat(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is TableFormat other && Equals(other); + + /// + public bool Equals(TableFormat other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + /// + public override string ToString() => _value; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TimeField.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TimeField.Serialization.cs new file mode 100644 index 000000000000..f87bda6e357a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TimeField.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Time field extracted from the content. + public partial class TimeField : ContentField, IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TimeField)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(FieldType.ToString()); + if (Optional.IsDefined(ValueTime)) + { + writer.WritePropertyName("valueTime"u8); + writer.WriteStringValue(ValueTime.Value, "T"); + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + TimeField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (TimeField)JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override ContentField JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TimeField)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTimeField(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static TimeField DeserializeTimeField(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContentFieldType @type = default; + IList spans = default; + float? confidence = default; + string source = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + ContentFieldType fieldType = default; + TimeSpan? valueTime = default; + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("type"u8)) + { + @type = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("type"u8)) + { + fieldType = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("valueTime"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + valueTime = prop.Value.GetTimeSpan("T"); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new TimeField( + @type, + spans ?? new ChangeTrackingList(), + confidence, + source, + additionalBinaryDataProperties, + fieldType, + valueTime); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(TimeField)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + TimeField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => (TimeField)PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override ContentField PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeTimeField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TimeField)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
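Because each field model implements IJsonModel&lt;T&gt;/IPersistableModel&lt;T&gt;, an instance can be rehydrated from raw JSON through System.ClientModel's ModelReaderWriter. A minimal sketch, assuming the wire shape shown by the serializer above; the JSON literal is illustrative:

```csharp
using System;
using System.ClientModel.Primitives;
using Azure.AI.ContentUnderstanding;

// Shape follows the serializer above: a "type" discriminator plus an ISO 8601 time value.
BinaryData json = BinaryData.FromString("{\"type\":\"time\",\"valueTime\":\"13:45:30\"}");

TimeField field = ModelReaderWriter.Read<TimeField>(json);
Console.WriteLine(field.ValueTime);   // 13:45:30 (a TimeSpan?)
```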
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TimeField.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TimeField.cs new file mode 100644 index 000000000000..d257c2a5b048 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TimeField.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Time field extracted from the content. + public partial class TimeField : ContentField + { + /// Initializes a new instance of . + internal TimeField() : base(ContentFieldType.Time) + { + } + + /// Initializes a new instance of . + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Keeps track of any properties unknown to the library. + /// Semantic data type of the field value. + /// Time field value, in ISO 8601 (hh:mm:ss) format. + internal TimeField(ContentFieldType @type, IList spans, float? confidence, string source, IDictionary additionalBinaryDataProperties, ContentFieldType fieldType, TimeSpan? valueTime) : base(@type, spans, confidence, source, additionalBinaryDataProperties) + { + FieldType = fieldType; + ValueTime = valueTime; + } + + /// Semantic data type of the field value. + internal ContentFieldType FieldType { get; set; } = "time"; + + /// Time field value, in ISO 8601 (hh:mm:ss) format. + public TimeSpan? ValueTime { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptPhrase.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptPhrase.Serialization.cs new file mode 100644 index 000000000000..454960aabc4f --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptPhrase.Serialization.cs @@ -0,0 +1,237 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Transcript phrase. + public partial class TranscriptPhrase : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal TranscriptPhrase() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TranscriptPhrase)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(Speaker)) + { + writer.WritePropertyName("speaker"u8); + writer.WriteStringValue(Speaker); + } + writer.WritePropertyName("startTimeMs"u8); + writer.WriteNumberValue(StartTimeMs); + writer.WritePropertyName("endTimeMs"u8); + writer.WriteNumberValue(EndTimeMs); + if (Optional.IsDefined(Locale)) + { + writer.WritePropertyName("locale"u8); + writer.WriteStringValue(Locale); + } + writer.WritePropertyName("text"u8); + writer.WriteStringValue(Text); + if (Optional.IsDefined(Confidence)) + { + writer.WritePropertyName("confidence"u8); + writer.WriteNumberValue(Confidence.Value); + } + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + writer.WritePropertyName("words"u8); + writer.WriteStartArray(); + foreach (TranscriptWord item in Words) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + TranscriptPhrase IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual TranscriptPhrase JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TranscriptPhrase)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTranscriptPhrase(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static TranscriptPhrase DeserializeTranscriptPhrase(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string speaker = default; + long startTimeMs = default; + long endTimeMs = default; + string locale = default; + string text = default; + float? 
confidence = default; + ContentSpan span = default; + IList words = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("speaker"u8)) + { + speaker = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("startTimeMs"u8)) + { + startTimeMs = prop.Value.GetInt64(); + continue; + } + if (prop.NameEquals("endTimeMs"u8)) + { + endTimeMs = prop.Value.GetInt64(); + continue; + } + if (prop.NameEquals("locale"u8)) + { + locale = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("text"u8)) + { + text = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("words"u8)) + { + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(TranscriptWord.DeserializeTranscriptWord(item, options)); + } + words = array; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new TranscriptPhrase( + speaker, + startTimeMs, + endTimeMs, + locale, + text, + confidence, + span, + words, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(TranscriptPhrase)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + TranscriptPhrase IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual TranscriptPhrase PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeTranscriptPhrase(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TranscriptPhrase)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptPhrase.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptPhrase.cs new file mode 100644 index 000000000000..5a620ff57d5c --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptPhrase.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.ContentUnderstanding +{ + /// Transcript phrase. + public partial class TranscriptPhrase + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Start time of the phrase in milliseconds. + /// End time of the phrase in milliseconds. + /// Transcript text. + /// List of words in the phrase. + internal TranscriptPhrase(long startTimeMs, long endTimeMs, string text, IEnumerable words) + { + StartTimeMs = startTimeMs; + EndTimeMs = endTimeMs; + Text = text; + Words = words.ToList(); + } + + /// Initializes a new instance of . + /// Speaker index or name. + /// Start time of the phrase in milliseconds. + /// End time of the phrase in milliseconds. + /// Detected locale of the phrase. Ex. en-US. + /// Transcript text. + /// Confidence of predicting the phrase. + /// Span of the phrase in the markdown content. + /// List of words in the phrase. + /// Keeps track of any properties unknown to the library. + internal TranscriptPhrase(string speaker, long startTimeMs, long endTimeMs, string locale, string text, float? confidence, ContentSpan span, IList words, IDictionary additionalBinaryDataProperties) + { + Speaker = speaker; + StartTimeMs = startTimeMs; + EndTimeMs = endTimeMs; + Locale = locale; + Text = text; + Confidence = confidence; + Span = span; + Words = words; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Speaker index or name. + public string Speaker { get; } + + /// Start time of the phrase in milliseconds. + public long StartTimeMs { get; } + + /// End time of the phrase in milliseconds. + public long EndTimeMs { get; } + + /// Detected locale of the phrase. Ex. en-US. + public string Locale { get; } + + /// Transcript text. + public string Text { get; } + + /// Confidence of predicting the phrase. + public float? Confidence { get; } + + /// Span of the phrase in the markdown content. + public ContentSpan Span { get; } + + /// List of words in the phrase. + public IList Words { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptWord.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptWord.Serialization.cs new file mode 100644 index 000000000000..51d25e0a6564 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptWord.Serialization.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Transcript word. 
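TranscriptPhrase carries per-phrase speaker, timing, locale, and word-level detail for audio content. A short consumption sketch; the Print helper and the assumption that the phrases came from an audio analysis result are illustrative:

```csharp
using System;
using System.Collections.Generic;
using Azure.AI.ContentUnderstanding;

public static class TranscriptSamples
{
    public static void Print(IEnumerable<TranscriptPhrase> phrases)
    {
        foreach (TranscriptPhrase phrase in phrases)
        {
            // Times are in milliseconds from the start of the audio.
            Console.WriteLine($"[{phrase.StartTimeMs}-{phrase.EndTimeMs} ms] {phrase.Speaker}: {phrase.Text}");

            foreach (TranscriptWord word in phrase.Words)
            {
                Console.WriteLine($"    {word.StartTimeMs} ms  {word.Text}");
            }
        }
    }
}
```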
+ public partial class TranscriptWord : IJsonModel + { + /// Initializes a new instance of for deserialization. + internal TranscriptWord() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TranscriptWord)} does not support writing '{format}' format."); + } + writer.WritePropertyName("startTimeMs"u8); + writer.WriteNumberValue(StartTimeMs); + writer.WritePropertyName("endTimeMs"u8); + writer.WriteNumberValue(EndTimeMs); + writer.WritePropertyName("text"u8); + writer.WriteStringValue(Text); + if (Optional.IsDefined(Span)) + { + writer.WritePropertyName("span"u8); + writer.WriteObjectValue(Span, options); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + TranscriptWord IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual TranscriptWord JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TranscriptWord)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTranscriptWord(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static TranscriptWord DeserializeTranscriptWord(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + long startTimeMs = default; + long endTimeMs = default; + string text = default; + ContentSpan span = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("startTimeMs"u8)) + { + startTimeMs = prop.Value.GetInt64(); + continue; + } + if (prop.NameEquals("endTimeMs"u8)) + { + endTimeMs = prop.Value.GetInt64(); + continue; + } + if (prop.NameEquals("text"u8)) + { + text = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new TranscriptWord(startTimeMs, endTimeMs, text, span, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(TranscriptWord)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + TranscriptWord IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual TranscriptWord PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeTranscriptWord(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TranscriptWord)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptWord.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptWord.cs new file mode 100644 index 000000000000..33e753afec3c --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/TranscriptWord.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Transcript word. 
+ public partial class TranscriptWord + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + /// Start time of the word in milliseconds. + /// End time of the word in milliseconds. + /// Transcript text. + internal TranscriptWord(long startTimeMs, long endTimeMs, string text) + { + StartTimeMs = startTimeMs; + EndTimeMs = endTimeMs; + Text = text; + } + + /// Initializes a new instance of . + /// Start time of the word in milliseconds. + /// End time of the word in milliseconds. + /// Transcript text. + /// Span of the word in the markdown content. + /// Keeps track of any properties unknown to the library. + internal TranscriptWord(long startTimeMs, long endTimeMs, string text, ContentSpan span, IDictionary additionalBinaryDataProperties) + { + StartTimeMs = startTimeMs; + EndTimeMs = endTimeMs; + Text = text; + Span = span; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// Start time of the word in milliseconds. + public long StartTimeMs { get; } + + /// End time of the word in milliseconds. + public long EndTimeMs { get; } + + /// Transcript text. + public string Text { get; } + + /// Span of the word in the markdown content. + public ContentSpan Span { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownContentField.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownContentField.Serialization.cs new file mode 100644 index 000000000000..81b3fba35128 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownContentField.Serialization.cs @@ -0,0 +1,156 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class UnknownContentField : ContentField, IJsonModel + { + /// Initializes a new instance of for deserialization. + internal UnknownContentField() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentField)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + } + + /// The JSON reader. + /// The client options for reading and writing models. + ContentField IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override ContentField JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ContentField)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeContentField(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static UnknownContentField DeserializeUnknownContentField(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ContentFieldType @type = default; + IList spans = default; + float? confidence = default; + string source = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("type"u8)) + { + @type = new ContentFieldType(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("spans"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ContentSpan.DeserializeContentSpan(item, options)); + } + spans = array; + continue; + } + if (prop.NameEquals("confidence"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidence = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new UnknownContentField(@type, spans ?? new ChangeTrackingList(), confidence, source, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(ContentField)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + ContentField IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override ContentField PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeContentField(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ContentField)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. 
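UnknownContentField is the generated fallback for "type" discriminator values this library version does not recognize; callers never see the type itself (it is internal) and simply receive a ContentField instead of a deserialization failure. A hedged sketch, assuming the public ContentField base dispatches on the discriminator when read through ModelReaderWriter, as the generated serializers indicate; the JSON literal is illustrative:

```csharp
using System;
using System.ClientModel.Primitives;
using Azure.AI.ContentUnderstanding;

// A payload whose discriminator this client version does not recognize.
BinaryData json = BinaryData.FromString("{\"type\":\"futureFieldKind\",\"confidence\":0.5}");

ContentField field = ModelReaderWriter.Read<ContentField>(json);

// Known kinds surface as their concrete types; anything else still deserializes
// (to the internal UnknownContentField) rather than throwing.
Console.WriteLine(field is StringField);   // False
Console.WriteLine(field is null);          // False
```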
+ string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownContentField.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownContentField.cs new file mode 100644 index 000000000000..41abba2dab0e --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownContentField.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class UnknownContentField : ContentField + { + /// Initializes a new instance of . + /// Semantic data type of the field value. + /// Span(s) associated with the field value in the markdown content. + /// Confidence of predicting the field value. + /// Encoded source that identifies the position of the field value in the content. + /// Keeps track of any properties unknown to the library. + internal UnknownContentField(ContentFieldType @type, IList spans, float? confidence, string source, IDictionary additionalBinaryDataProperties) : base(@type != default ? @type : "unknown", spans, confidence, source, additionalBinaryDataProperties) + { + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownDocumentFigure.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownDocumentFigure.Serialization.cs new file mode 100644 index 000000000000..58bd2b7420a4 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownDocumentFigure.Serialization.cs @@ -0,0 +1,220 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class UnknownDocumentFigure : DocumentFigure, IJsonModel + { + /// Initializes a new instance of for deserialization. + internal UnknownDocumentFigure() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentFigure)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + } + + /// The JSON reader. + /// The client options for reading and writing models. + DocumentFigure IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override DocumentFigure JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentFigure)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentFigure(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static UnknownDocumentFigure DeserializeUnknownDocumentFigure(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + DocumentFigureKind kind = default; + string id = default; + string source = default; + ContentSpan span = default; + IList elements = default; + DocumentCaption caption = default; + IList footnotes = default; + string description = default; + SemanticRole? role = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("kind"u8)) + { + kind = new DocumentFigureKind(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("id"u8)) + { + id = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("source"u8)) + { + source = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("span"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + span = ContentSpan.DeserializeContentSpan(prop.Value, options); + continue; + } + if (prop.NameEquals("elements"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(item.GetString()); + } + } + elements = array; + continue; + } + if (prop.NameEquals("caption"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + caption = DocumentCaption.DeserializeDocumentCaption(prop.Value, options); + continue; + } + if (prop.NameEquals("footnotes"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(DocumentFootnote.DeserializeDocumentFootnote(item, options)); + } + footnotes = array; + continue; + } + if (prop.NameEquals("description"u8)) + { + description = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("role"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + role = new SemanticRole(prop.Value.GetString()); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new UnknownDocumentFigure( + kind, + id, + source, + span, + elements ?? new ChangeTrackingList(), + caption, + footnotes ?? new ChangeTrackingList(), + description, + role, + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(DocumentFigure)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + DocumentFigure IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override DocumentFigure PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeDocumentFigure(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentFigure)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownDocumentFigure.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownDocumentFigure.cs new file mode 100644 index 000000000000..6170f5defb4a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownDocumentFigure.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class UnknownDocumentFigure : DocumentFigure + { + /// Initializes a new instance of . + /// Figure kind. + /// Figure identifier. + /// Encoded source that identifies the position of the figure in the content. + /// Span of the figure in the markdown content. + /// Child elements of the figure, excluding any caption or footnotes. + /// Figure caption. + /// List of figure footnotes. + /// Description of the figure. + /// Semantic role of the figure. + /// Keeps track of any properties unknown to the library. + internal UnknownDocumentFigure(DocumentFigureKind kind, string id, string source, ContentSpan span, IList elements, DocumentCaption caption, IList footnotes, string description, SemanticRole? role, IDictionary additionalBinaryDataProperties) : base(kind != default ? kind : "unknown", id, source, span, elements, caption, footnotes, description, role, additionalBinaryDataProperties) + { + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownKnowledgeSource.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownKnowledgeSource.Serialization.cs new file mode 100644 index 000000000000..c12c72b9511d --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownKnowledgeSource.Serialization.cs @@ -0,0 +1,125 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class UnknownKnowledgeSource : KnowledgeSource, IJsonModel + { + /// Initializes a new instance of for deserialization. + internal UnknownKnowledgeSource() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(KnowledgeSource)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + } + + /// The JSON reader. + /// The client options for reading and writing models. + KnowledgeSource IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override KnowledgeSource JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(KnowledgeSource)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeKnowledgeSource(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static UnknownKnowledgeSource DeserializeUnknownKnowledgeSource(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + KnowledgeSourceKind kind = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("kind"u8)) + { + kind = new KnowledgeSourceKind(prop.Value.GetString()); + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new UnknownKnowledgeSource(kind, additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(KnowledgeSource)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. 
+ KnowledgeSource IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override KnowledgeSource PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeKnowledgeSource(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(KnowledgeSource)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownKnowledgeSource.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownKnowledgeSource.cs new file mode 100644 index 000000000000..6b0c329d5765 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownKnowledgeSource.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class UnknownKnowledgeSource : KnowledgeSource + { + /// Initializes a new instance of . + /// The kind of knowledge source. + /// Keeps track of any properties unknown to the library. + internal UnknownKnowledgeSource(KnowledgeSourceKind kind, IDictionary additionalBinaryDataProperties) : base(kind != default ? kind : "unknown", additionalBinaryDataProperties) + { + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownMediaContent.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownMediaContent.Serialization.cs new file mode 100644 index 000000000000..2529204d81a2 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownMediaContent.Serialization.cs @@ -0,0 +1,178 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class UnknownMediaContent : MediaContent, IJsonModel + { + /// Initializes a new instance of for deserialization. + internal UnknownMediaContent() + { + } + + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MediaContent)} does not support writing '{format}' format."); + } + base.JsonModelWriteCore(writer, options); + } + + /// The JSON reader. + /// The client options for reading and writing models. + MediaContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected override MediaContent JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MediaContent)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeMediaContent(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. + internal static UnknownMediaContent DeserializeUnknownMediaContent(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + MediaContentKind kind = default; + string mimeType = default; + string analyzerId = default; + string category = default; + string path = default; + string markdown = default; + IDictionary fields = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("kind"u8)) + { + kind = new MediaContentKind(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("mimeType"u8)) + { + mimeType = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("analyzerId"u8)) + { + analyzerId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("category"u8)) + { + category = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("path"u8)) + { + path = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("markdown"u8)) + { + markdown = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("fields"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + dictionary.Add(prop0.Name, ContentField.DeserializeContentField(prop0.Value, options)); + } + fields = dictionary; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new UnknownMediaContent( + kind, + mimeType, + analyzerId, + category, + path, + markdown, + fields ?? new ChangeTrackingDictionary(), + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(MediaContent)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + MediaContent IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected override MediaContent PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeMediaContent(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(MediaContent)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownMediaContent.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownMediaContent.cs new file mode 100644 index 000000000000..46a47a7f4402 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UnknownMediaContent.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + internal partial class UnknownMediaContent : MediaContent + { + /// Initializes a new instance of . + /// Content kind. + /// Detected MIME type of the content. Ex. application/pdf, image/jpeg, etc. + /// The analyzer that generated this content. + /// Classified content category. + /// The path of the content in the input. + /// Markdown representation of the content. + /// Extracted fields from the content. + /// Keeps track of any properties unknown to the library. + internal UnknownMediaContent(MediaContentKind kind, string mimeType, string analyzerId, string category, string path, string markdown, IDictionary fields, IDictionary additionalBinaryDataProperties) : base(kind != default ? kind : "unknown", mimeType, analyzerId, category, path, markdown, fields, additionalBinaryDataProperties) + { + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UsageDetails.Serialization.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UsageDetails.Serialization.cs new file mode 100644 index 000000000000..990099db49f0 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UsageDetails.Serialization.cs @@ -0,0 +1,253 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
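The `Unknown*` models above are the fallback path for discriminated payloads: when the service reports a `kind` this library version has no dedicated type for, deserialization still succeeds, the raw kind string is preserved, and unmapped JSON properties are kept in the additional-properties bag. From the caller's side that means every item in `AnalyzeResult.Contents` can be handled through the shared `MediaContent` surface even when it is not a recognized subtype. A minimal sketch of that pattern (the helper class and method names here are illustrative, not part of the library):

```C#
using System;
using Azure.AI.ContentUnderstanding;

internal static class ContentDump
{
    // Works for any analyze result, including content kinds newer than this library version.
    internal static void Print(AnalyzeResult result)
    {
        foreach (MediaContent content in result.Contents)
        {
            switch (content)
            {
                case DocumentContent document:
                    // Known kind with a dedicated type: page-aware members are available.
                    Console.WriteLine($"Document ({document.MimeType}), pages {document.StartPageNumber}-{document.EndPageNumber}");
                    break;
                default:
                    // Unrecognized kinds fall back to the base MediaContent surface.
                    Console.WriteLine($"{content.GetType().Name} ({content.MimeType}), markdown length {content.Markdown?.Length ?? 0}");
                    break;
            }
        }
    }
}
```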
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.ContentUnderstanding +{ + /// Usage details. + internal partial class UsageDetails : IJsonModel + { + /// The JSON writer. + /// The client options for reading and writing models. + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + /// The JSON writer. + /// The client options for reading and writing models. + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(UsageDetails)} does not support writing '{format}' format."); + } + if (Optional.IsDefined(DocumentPagesMinimal)) + { + writer.WritePropertyName("documentPagesMinimal"u8); + writer.WriteNumberValue(DocumentPagesMinimal.Value); + } + if (Optional.IsDefined(DocumentPagesBasic)) + { + writer.WritePropertyName("documentPagesBasic"u8); + writer.WriteNumberValue(DocumentPagesBasic.Value); + } + if (Optional.IsDefined(DocumentPagesStandard)) + { + writer.WritePropertyName("documentPagesStandard"u8); + writer.WriteNumberValue(DocumentPagesStandard.Value); + } + if (Optional.IsDefined(AudioHours)) + { + writer.WritePropertyName("audioHours"u8); + writer.WriteNumberValue(AudioHours.Value); + } + if (Optional.IsDefined(VideoHours)) + { + writer.WritePropertyName("videoHours"u8); + writer.WriteNumberValue(VideoHours.Value); + } + if (Optional.IsDefined(ContextualizationTokens)) + { + writer.WritePropertyName("contextualizationTokens"u8); + writer.WriteNumberValue(ContextualizationTokens.Value); + } + if (Optional.IsCollectionDefined(Tokens)) + { + writer.WritePropertyName("tokens"u8); + writer.WriteStartObject(); + foreach (var item in Tokens) + { + writer.WritePropertyName(item.Key); + writer.WriteNumberValue(item.Value); + } + writer.WriteEndObject(); + } + if (options.Format != "W" && _additionalBinaryDataProperties != null) + { + foreach (var item in _additionalBinaryDataProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + } + + /// The JSON reader. + /// The client options for reading and writing models. + UsageDetails IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + /// The JSON reader. + /// The client options for reading and writing models. + protected virtual UsageDetails JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(UsageDetails)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeUsageDetails(document.RootElement, options); + } + + /// The JSON element to deserialize. + /// The client options for reading and writing models. 
+ internal static UsageDetails DeserializeUsageDetails(JsonElement element, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + int? documentPagesMinimal = default; + int? documentPagesBasic = default; + int? documentPagesStandard = default; + float? audioHours = default; + float? videoHours = default; + int? contextualizationTokens = default; + IDictionary tokens = default; + IDictionary additionalBinaryDataProperties = new ChangeTrackingDictionary(); + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("documentPagesMinimal"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + documentPagesMinimal = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("documentPagesBasic"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + documentPagesBasic = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("documentPagesStandard"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + documentPagesStandard = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("audioHours"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + audioHours = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("videoHours"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + videoHours = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("contextualizationTokens"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + contextualizationTokens = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("tokens"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + dictionary.Add(prop0.Name, prop0.Value.GetInt32()); + } + tokens = dictionary; + continue; + } + if (options.Format != "W") + { + additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText())); + } + } + return new UsageDetails( + documentPagesMinimal, + documentPagesBasic, + documentPagesStandard, + audioHours, + videoHours, + contextualizationTokens, + tokens ?? new ChangeTrackingDictionary(), + additionalBinaryDataProperties); + } + + /// The client options for reading and writing models. + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + /// The client options for reading and writing models. + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, AzureAIContentUnderstandingContext.Default); + default: + throw new FormatException($"The model {nameof(UsageDetails)} does not support writing '{options.Format}' format."); + } + } + + /// The data to parse. + /// The client options for reading and writing models. + UsageDetails IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + /// The data to parse. + /// The client options for reading and writing models. + protected virtual UsageDetails PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeUsageDetails(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(UsageDetails)} does not support reading '{options.Format}' format."); + } + } + + /// The client options for reading and writing models. + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UsageDetails.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UsageDetails.cs new file mode 100644 index 000000000000..fd1f5f8b2330 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Generated/Models/UsageDetails.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentUnderstanding +{ + /// Usage details. + internal partial class UsageDetails + { + /// Keeps track of any properties unknown to the library. + private protected readonly IDictionary _additionalBinaryDataProperties; + + /// Initializes a new instance of . + internal UsageDetails() + { + Tokens = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// + /// The number of document pages processed at the minimal level. + /// For documents without explicit pages (ex. txt, html), every 3000 UTF-16 characters is counted as one page. + /// + /// + /// The number of document pages processed at the basic level. + /// For documents without explicit pages (ex. txt, html), every 3000 UTF-16 characters is counted as one page. + /// + /// + /// The number of document pages processed at the standard level. + /// For documents without explicit pages (ex. txt, html), every 3000 UTF-16 characters is counted as one page. + /// + /// The hours of audio processed. + /// The hours of video processed. + /// The number of contextualization tokens consumed for preparing context, generating confidence scores, source grounding, and output formatting. + /// The number of LLM and embedding tokens consumed, grouped by model (ex. GPT 4.1) and type (ex. input, cached input, output). + /// Keeps track of any properties unknown to the library. + internal UsageDetails(int? documentPagesMinimal, int? documentPagesBasic, int? documentPagesStandard, float? audioHours, float? videoHours, int? contextualizationTokens, IDictionary tokens, IDictionary additionalBinaryDataProperties) + { + DocumentPagesMinimal = documentPagesMinimal; + DocumentPagesBasic = documentPagesBasic; + DocumentPagesStandard = documentPagesStandard; + AudioHours = audioHours; + VideoHours = videoHours; + ContextualizationTokens = contextualizationTokens; + Tokens = tokens; + _additionalBinaryDataProperties = additionalBinaryDataProperties; + } + + /// + /// The number of document pages processed at the minimal level. + /// For documents without explicit pages (ex. txt, html), every 3000 UTF-16 characters is counted as one page. + /// + public int? DocumentPagesMinimal { get; } + + /// + /// The number of document pages processed at the basic level. + /// For documents without explicit pages (ex. txt, html), every 3000 UTF-16 characters is counted as one page. + /// + public int?
DocumentPagesBasic { get; } + + /// + /// The number of document pages processed at the standard level. + /// For documents without explicit pages (ex. txt, html), every 3000 UTF-16 characters is counted as one page. + /// + public int? DocumentPagesStandard { get; } + + /// The hours of audio processed. + public float? AudioHours { get; } + + /// The hours of video processed. + public float? VideoHours { get; } + + /// The number of contextualization tokens consumed for preparing context, generating confidence scores, source grounding, and output formatting. + public int? ContextualizationTokens { get; } + + /// The number of LLM and embedding tokens consumed, grouped by model (ex. GPT 4.1) and type (ex. input, cached input, output). + public IDictionary Tokens { get; } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ObjectField.Extensions.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ObjectField.Extensions.cs new file mode 100644 index 000000000000..248788dc3d91 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/ObjectField.Extensions.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.AI.ContentUnderstanding +{ + /// + /// Extension methods and convenience properties for . + /// + public partial class ObjectField + { + /// + /// Gets a field from the object by name. + /// + /// The name of the field to retrieve. + /// The field if found, or null if not found. + public ContentField this[string fieldName] + { + get + { + if (ValueObject != null && ValueObject.TryGetValue(fieldName, out var field)) + { + return field; + } + return null; + } + } + } +} + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Properties/AssemblyInfo.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Properties/AssemblyInfo.cs new file mode 100644 index 000000000000..ddc9e5126237 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Properties/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using Azure.Core; + +[assembly: AzureResourceProviderNamespace("Microsoft.CognitiveServices")] diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Suppression.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Suppression.cs new file mode 100644 index 000000000000..d82f114331f8 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/src/Suppression.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System.Diagnostics.CodeAnalysis; + +// Suppress AZC0034 warnings for types that have naming conflicts with FormRecognizer and DocumentIntelligence SDKs. +// These conflicts are expected as Content Understanding is the next iteration of the Document Intelligence service. +// The types are intentionally named to match the service specification and maintain consistency across Azure AI SDKs. +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected.
These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.AnalyzeResult")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.CopyAuthorization")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentBarcode")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentBarcodeKind")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentCaption")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentFigure")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentFootnote")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentFormula")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentFormulaKind")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentLine")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentPage")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. 
These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentParagraph")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentSection")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentTable")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentTableCell")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentTableCellKind")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.DocumentWord")] +[assembly: SuppressMessage("Design", "AZC0034", Justification = "Type name conflicts with FormRecognizer/DocumentIntelligence are expected. These types match the service specification and maintain consistency across Azure AI SDKs.", Scope = "type", Target = "~T:Azure.AI.ContentUnderstanding.LengthUnit")] diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/Azure.AI.ContentUnderstanding.Tests.csproj b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/Azure.AI.ContentUnderstanding.Tests.csproj new file mode 100644 index 000000000000..5d01c9ba822a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/Azure.AI.ContentUnderstanding.Tests.csproj @@ -0,0 +1,30 @@ + + + + $(RequiredTargetFrameworks) + + + + + + + + + + + + + + + + + + + + + PreserveNewest + + + + diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/ContentUnderstandingClientTest.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/ContentUnderstandingClientTest.cs new file mode 100644 index 000000000000..547bd6930b1c --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/ContentUnderstandingClientTest.cs @@ -0,0 +1,1402 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +#nullable enable + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using Azure.Core.TestFramework.Models; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Tests +{ + public class ContentUnderstandingClientTest : RecordedTestBase + { + public ContentUnderstandingClientTest(bool isAsync) + : base(isAsync) + { + ContentUnderstandingTestBase.ConfigureCommonSanitizers(this); + } + + private ContentUnderstandingClient GetClient() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + return InstrumentClient(new ContentUnderstandingClient( + new Uri(endpoint), + TestEnvironment.Credential, + options)); + } + + /// + /// Tests updating default model deployments for the Content Understanding service. + /// Verifies that model deployments (gpt-4.1, gpt-4.1-mini, text-embedding-3-large) can be updated and are correctly persisted. + /// + [RecordedTest] + public async Task UpdateDefaultsAsync() + { + ContentUnderstandingClient client = GetClient(); + + // Check if model deployments are configured in test environment + string? gpt41Deployment = TestEnvironment.Gpt41Deployment; + string? gpt41MiniDeployment = TestEnvironment.Gpt41MiniDeployment; + string? textEmbeddingDeployment = TestEnvironment.TextEmbedding3LargeDeployment; + + if (string.IsNullOrEmpty(gpt41Deployment) || string.IsNullOrEmpty(gpt41MiniDeployment) || string.IsNullOrEmpty(textEmbeddingDeployment)) + { + Assert.Inconclusive("Model deployments are not configured in test environment. Skipping UpdateDefaultsAsync test."); + return; + } + + // Update defaults with configured deployments + var modelDeployments = new Dictionary + { + ["gpt-4.1"] = gpt41Deployment!, + ["gpt-4.1-mini"] = gpt41MiniDeployment!, + ["text-embedding-3-large"] = textEmbeddingDeployment! + }; + + Response response = await client.UpdateDefaultsAsync(modelDeployments); + + Assert.IsNotNull(response, "Update response should not be null"); + Assert.IsNotNull(response.Value, "Updated defaults should not be null"); + + ContentUnderstandingDefaults updatedDefaults = response.Value; + + // Verify the updated defaults + Assert.IsNotNull(updatedDefaults.ModelDeployments, "Updated model deployments should not be null"); + Assert.IsTrue(updatedDefaults.ModelDeployments.Count >= 3, "Should have at least 3 model deployments"); + + // Verify each deployment was set correctly + Assert.IsTrue(updatedDefaults.ModelDeployments.ContainsKey("gpt-4.1"), "Should contain gpt-4.1 deployment"); + Assert.AreEqual(gpt41Deployment, updatedDefaults.ModelDeployments["gpt-4.1"], "gpt-4.1 deployment should match"); + + Assert.IsTrue(updatedDefaults.ModelDeployments.ContainsKey("gpt-4.1-mini"), "Should contain gpt-4.1-mini deployment"); + Assert.AreEqual(gpt41MiniDeployment, updatedDefaults.ModelDeployments["gpt-4.1-mini"], "gpt-4.1-mini deployment should match"); + + Assert.IsTrue(updatedDefaults.ModelDeployments.ContainsKey("text-embedding-3-large"), "Should contain text-embedding-3-large deployment"); + Assert.AreEqual(textEmbeddingDeployment, updatedDefaults.ModelDeployments["text-embedding-3-large"], "text-embedding-3-large deployment should match"); + } + + /// + /// Tests retrieving default model deployments from the Content Understanding service. 
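Outside the recorded-test plumbing, the defaults workflow these two tests exercise is a pair of calls on the client. A hedged sketch, assuming the endpoint and deployment names below are placeholders for your own Microsoft Foundry resource and that the deployment map is a string-to-string dictionary, matching how the test builds it:

```C#
using System;
using System.Collections.Generic;
using Azure.AI.ContentUnderstanding;
using Azure.Identity;

// Placeholder endpoint; replace with your Microsoft Foundry resource endpoint.
var client = new ContentUnderstandingClient(
    new Uri("https://<your-resource>.services.ai.azure.com/"),
    new DefaultAzureCredential(),
    new ContentUnderstandingClientOptions());

// Map the model names Content Understanding expects onto your own deployments.
var deployments = new Dictionary<string, string>
{
    ["gpt-4.1"] = "<gpt-4.1-deployment>",
    ["gpt-4.1-mini"] = "<gpt-4.1-mini-deployment>",
    ["text-embedding-3-large"] = "<text-embedding-3-large-deployment>",
};
await client.UpdateDefaultsAsync(deployments);

// Read the configuration back to confirm what the resource will use.
ContentUnderstandingDefaults defaults = (await client.GetDefaultsAsync()).Value;
foreach (var (model, deployment) in defaults.ModelDeployments)
{
    Console.WriteLine($"{model} -> {deployment}");
}
```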
+ /// Verifies that the returned defaults contain the expected model deployment configurations. + /// + [RecordedTest] + public async Task GetDefaultsAsync() + { + ContentUnderstandingClient client = GetClient(); + + // Load expected model values from test environment + string? gpt41Deployment = TestEnvironment.Gpt41Deployment; + string? gpt41MiniDeployment = TestEnvironment.Gpt41MiniDeployment; + string? textEmbeddingDeployment = TestEnvironment.TextEmbedding3LargeDeployment; + + Response response = await client.GetDefaultsAsync(); + + Assert.IsNotNull(response, "Response should not be null"); + Assert.IsNotNull(response.Value, "Response value should not be null"); + + ContentUnderstandingDefaults defaults = response.Value; + + // Verify defaults structure + Assert.IsNotNull(defaults, "Defaults should not be null"); + + // ModelDeployments may be null or empty if not configured + if (defaults.ModelDeployments != null && defaults.ModelDeployments.Count > 0) + { + Assert.IsTrue(defaults.ModelDeployments.Count > 0, "Model deployments dictionary should not be empty if not null"); + + // Verify expected keys exist if deployments are configured + foreach (var kvp in defaults.ModelDeployments) + { + Assert.IsFalse(string.IsNullOrWhiteSpace(kvp.Key), "Model deployment key should not be null or empty"); + Assert.IsFalse(string.IsNullOrWhiteSpace(kvp.Value), "Model deployment value should not be null or empty"); + } + + // Verify specific model values if they are configured in test environment + if (!string.IsNullOrEmpty(gpt41Deployment)) + { + Assert.IsTrue(defaults.ModelDeployments.ContainsKey("gpt-4.1"), "Should contain gpt-4.1 deployment"); + Assert.AreEqual(gpt41Deployment, defaults.ModelDeployments["gpt-4.1"], "gpt-4.1 deployment should match test environment value"); + } + + if (!string.IsNullOrEmpty(gpt41MiniDeployment)) + { + Assert.IsTrue(defaults.ModelDeployments.ContainsKey("gpt-4.1-mini"), "Should contain gpt-4.1-mini deployment"); + Assert.AreEqual(gpt41MiniDeployment, defaults.ModelDeployments["gpt-4.1-mini"], "gpt-4.1-mini deployment should match test environment value"); + } + + if (!string.IsNullOrEmpty(textEmbeddingDeployment)) + { + Assert.IsTrue(defaults.ModelDeployments.ContainsKey("text-embedding-3-large"), "Should contain text-embedding-3-large deployment"); + Assert.AreEqual(textEmbeddingDeployment, defaults.ModelDeployments["text-embedding-3-large"], "text-embedding-3-large deployment should match test environment value"); + } + } + } + + /// + /// Tests basic binary document analysis using the prebuilt-documentSearch analyzer. + /// Verifies that the analysis operation completes successfully and returns content results. 
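Stripped of the assertions, the binary-analysis path covered by the next tests is one long-running call followed by reading markdown off the first content element. A minimal sketch, assuming the `client` from the previous snippet and a local `sample_invoice.pdf`:

```C#
using System;
using System.IO;
using Azure;
using Azure.AI.ContentUnderstanding;

// Read a local PDF and submit it for analysis; the call waits until the operation completes.
BinaryData pdf = BinaryData.FromBytes(File.ReadAllBytes("sample_invoice.pdf"));

AnalyzeResultOperation operation = await client.AnalyzeBinaryAsync(
    WaitUntil.Completed,
    "prebuilt-documentSearch",
    "application/pdf",
    pdf);

// A single PDF yields one content element; its Markdown carries the extracted text and layout.
AnalyzeResult result = operation.Value;
MediaContent content = result.Contents[0];
Console.WriteLine(content.Markdown);
```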
+ /// + [RecordedTest] + public async Task AnalyzeBinaryAsync_Basic() + { + ContentUnderstandingClient client = GetClient(); + + // Get test file path + string filePath = ContentUnderstandingClientTestEnvironment.CreatePath("sample_invoice.pdf"); + Assert.IsTrue(File.Exists(filePath), $"Sample file should exist at {filePath}"); + + byte[] fileBytes = File.ReadAllBytes(filePath); + Assert.IsTrue(fileBytes.Length > 0, "File should not be empty"); + + BinaryData binaryData = BinaryData.FromBytes(fileBytes); + Assert.IsNotNull(binaryData, "Binary data should not be null"); + + // Analyze the document + AnalyzeResultOperation operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + binaryData); + + // Verify operation completed successfully + Assert.IsNotNull(operation, "Analysis operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsNotNull(operation.GetRawResponse(), "Operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + + // Verify result + AnalyzeResult result = operation.Value; + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result contents should not be null"); + Assert.IsTrue(result.Contents.Count > 0, "Result should contain at least one content element"); + } + + /// + /// Tests extracting markdown content from analyzed binary documents. + /// Verifies that markdown is successfully extracted and is non-empty. + /// + [RecordedTest] + public async Task AnalyzeBinaryAsync_ExtractMarkdown() + { + ContentUnderstandingClient client = GetClient(); + + // Get test file path + string filePath = ContentUnderstandingClientTestEnvironment.CreatePath("sample_invoice.pdf"); + Assert.IsTrue(File.Exists(filePath), $"Sample file should exist at {filePath}"); + + byte[] fileBytes = File.ReadAllBytes(filePath); + BinaryData binaryData = BinaryData.FromBytes(fileBytes); + + // Analyze the document + AnalyzeResultOperation operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + binaryData); + + AnalyzeResult result = operation.Value; + + // Verify contents exist + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents!.Count > 0, "Result should have at least one content"); + Assert.AreEqual(1, result.Contents.Count, "PDF file should have exactly one content element"); + + // Extract markdown from first content + MediaContent? content = result.Contents.First(); + Assert.IsNotNull(content, "Content should not be null"); + Assert.IsInstanceOf(content, "Content should be of type MediaContent"); + + if (content is MediaContent mediaContent) + { + Assert.IsNotNull(mediaContent.Markdown, "Markdown content should not be null"); + Assert.IsTrue(mediaContent.Markdown.Length > 0, "Markdown content should not be empty"); + Assert.IsFalse(string.IsNullOrWhiteSpace(mediaContent.Markdown), + "Markdown content should not be just whitespace"); + } + } + + /// + /// Tests extracting document properties from analyzed binary documents, including MIME type, page information, and table structures. + /// Verifies page numbers, dimensions, table row/column counts, and cell indices are correctly extracted. 
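When a prebuilt document analyzer returns `DocumentContent`, the layout data validated below (pages with dimensions, tables with row/column counts and spanned cells) can be walked directly. A sketch, reusing the `result` from the previous snippet:

```C#
using System;
using Azure.AI.ContentUnderstanding;

if (result.Contents[0] is DocumentContent document)
{
    foreach (var page in document.Pages)
    {
        Console.WriteLine($"Page {page.PageNumber}: {page.Width} x {page.Height}");
    }

    foreach (var table in document.Tables)
    {
        Console.WriteLine($"Table: {table.RowCount} rows x {table.ColumnCount} columns");
        foreach (var cell in table.Cells)
        {
            // RowSpan/ColumnSpan are nullable and default to 1 when omitted.
            Console.WriteLine($"  cell [{cell.RowIndex},{cell.ColumnIndex}] span {cell.RowSpan ?? 1}x{cell.ColumnSpan ?? 1}");
        }
    }
}
```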
+ /// + [RecordedTest] + public async Task AnalyzeBinaryAsync_DocumentProperties() + { + ContentUnderstandingClient client = GetClient(); + + // Get test file path + string filePath = ContentUnderstandingClientTestEnvironment.CreatePath("sample_invoice.pdf"); + Assert.IsTrue(File.Exists(filePath), $"Sample file should exist at {filePath}"); + + byte[] fileBytes = File.ReadAllBytes(filePath); + BinaryData binaryData = BinaryData.FromBytes(fileBytes); + + // Analyze the document + AnalyzeResultOperation operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + binaryData); + + AnalyzeResult result = operation.Value; + + // Verify contents exist + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents!.Count > 0, "Result should have at least one content"); + + MediaContent? content = result.Contents.First(); + Assert.IsNotNull(content, "Content should not be null for document properties validation"); + + // Verify document content type and properties + if (content is DocumentContent docContent) + { + // Validate MIME type + Assert.IsNotNull(docContent.MimeType, "MIME type should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(docContent.MimeType), "MIME type should not be empty"); + Assert.AreEqual("application/pdf", docContent.MimeType, "MIME type should be application/pdf"); + + // Validate page numbers + Assert.IsTrue(docContent.StartPageNumber >= 1, "Start page should be >= 1"); + Assert.IsTrue(docContent.EndPageNumber >= docContent.StartPageNumber, + "End page should be >= start page"); + int totalPages = docContent.EndPageNumber - docContent.StartPageNumber + 1; + Assert.IsTrue(totalPages > 0, "Total pages should be positive"); + + // Validate pages collection if available + if (docContent.Pages != null && docContent.Pages.Count > 0) + { + Assert.IsTrue(docContent.Pages.Count > 0, "Pages collection should not be empty when not null"); + Assert.AreEqual(totalPages, docContent.Pages.Count, + "Pages collection count should match calculated total pages"); + + var pageNumbers = new HashSet(); + + foreach (var page in docContent.Pages) + { + Assert.IsNotNull(page, "Page object should not be null"); + Assert.IsTrue(page.PageNumber >= 1, "Page number should be >= 1"); + Assert.IsTrue(page.PageNumber >= docContent.StartPageNumber && + page.PageNumber <= docContent.EndPageNumber, + $"Page number {page.PageNumber} should be within document range [{docContent.StartPageNumber}, {docContent.EndPageNumber}]"); + Assert.IsTrue(page.Width > 0, $"Page {page.PageNumber} width should be > 0, but was {page.Width}"); + Assert.IsTrue(page.Height > 0, $"Page {page.PageNumber} height should be > 0, but was {page.Height}"); + + // Ensure page numbers are unique + Assert.IsTrue(pageNumbers.Add(page.PageNumber), + $"Page number {page.PageNumber} appears multiple times"); + } + } + + // Validate tables collection if available + // Expected table counts from recording: Table 1 (2 rows, 6 columns), Table 2 (4 rows, 8 columns), Table 3 (5 rows, 2 columns) + int[] expectedRowCounts = { 2, 4, 5 }; + int[] expectedColumnCounts = { 6, 8, 2 }; + + if (docContent.Tables != null && docContent.Tables.Count > 0) + { + Assert.IsTrue(docContent.Tables.Count > 0, "Tables collection should not be empty when not null"); + Assert.AreEqual(expectedRowCounts.Length, docContent.Tables.Count, + $"Expected {expectedRowCounts.Length} tables based on recording, but found {docContent.Tables.Count}"); + + int tableCounter = 
0; + foreach (var table in docContent.Tables) + { + Assert.IsNotNull(table, $"Table {tableCounter + 1} should not be null"); + + // Verify row and column counts match expected values from recording + if (tableCounter < expectedRowCounts.Length) + { + Assert.AreEqual(expectedRowCounts[tableCounter], table.RowCount, + $"Table {tableCounter + 1} row count should be {expectedRowCounts[tableCounter]}, but was {table.RowCount}"); + Assert.AreEqual(expectedColumnCounts[tableCounter], table.ColumnCount, + $"Table {tableCounter + 1} column count should be {expectedColumnCounts[tableCounter]}, but was {table.ColumnCount}"); + } + + // Validate table cells if available + if (table.Cells != null && table.Cells.Count > 0) + { + Assert.IsTrue(table.Cells.Count > 0, $"Table {tableCounter + 1} cells collection should not be empty when not null"); + + foreach (var cell in table.Cells) + { + Assert.IsNotNull(cell, "Table cell should not be null"); + Assert.IsTrue(cell.RowIndex >= 0, $"Cell row index should be >= 0, but was {cell.RowIndex}"); + Assert.IsTrue(cell.ColumnIndex >= 0, $"Cell column index should be >= 0, but was {cell.ColumnIndex}"); + + // RowSpan and ColumnSpan are nullable, default to 1 if null + int rowSpan = cell.RowSpan ?? 1; + int columnSpan = cell.ColumnSpan ?? 1; + Assert.IsTrue(rowSpan >= 1, $"Cell row span should be >= 1, but was {rowSpan}"); + Assert.IsTrue(columnSpan >= 1, $"Cell column span should be >= 1, but was {columnSpan}"); + + // Verify cell indices are within declared table bounds + int cellEndRow = cell.RowIndex + rowSpan - 1; + int cellEndColumn = cell.ColumnIndex + columnSpan - 1; + Assert.IsTrue(cell.RowIndex < table.RowCount, + $"Cell row index {cell.RowIndex} should be < table row count {table.RowCount}"); + Assert.IsTrue(cellEndRow < table.RowCount, + $"Cell end row {cellEndRow} (row {cell.RowIndex} + span {rowSpan}) should be < table row count {table.RowCount}"); + Assert.IsTrue(cell.ColumnIndex < table.ColumnCount, + $"Cell column index {cell.ColumnIndex} should be < table column count {table.ColumnCount}"); + Assert.IsTrue(cellEndColumn < table.ColumnCount, + $"Cell end column {cellEndColumn} (column {cell.ColumnIndex} + span {columnSpan}) should be < table column count {table.ColumnCount}"); + } + } + + tableCounter++; + } + } + } + else + { + Assert.Warn("Expected DocumentContent but got " + content?.GetType().Name); + } + } + + /// + /// Tests analyzing a document from a URL using the prebuilt-documentSearch analyzer. + /// Verifies that the analysis operation completes successfully and returns content results. 
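The URL variant verified below takes the same shape but goes through `AnalyzeAsync` with an `AnalyzeInput` pointing at a document the service can fetch. A short sketch, with the URL as a placeholder and the operation's result type assumed to be `AnalyzeResult`:

```C#
using System;
using Azure;
using Azure.AI.ContentUnderstanding;

// Placeholder URL; the document must be reachable by the service.
var input = new AnalyzeInput { Url = new Uri("https://<your-storage>.blob.core.windows.net/docs/invoice.pdf") };

Operation<AnalyzeResult> operation = await client.AnalyzeAsync(
    WaitUntil.Completed,
    "prebuilt-documentSearch",
    inputs: new[] { input });

AnalyzeResult result = operation.Value;
Console.WriteLine($"Contents returned: {result.Contents.Count}");
```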
+ /// + [RecordedTest] + public async Task AnalyzeUrlAsync() + { + ContentUnderstandingClient client = GetClient(); + + // Get test file URI + Uri uriSource = ContentUnderstandingClientTestEnvironment.CreateUri("invoice.pdf"); + Assert.IsNotNull(uriSource, "URI source should not be null"); + Assert.IsTrue(uriSource.IsAbsoluteUri, "URI should be absolute"); + + // Analyze the document from URL + Operation operation = await client.AnalyzeAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + inputs: new[] { new AnalyzeInput { Url = uriSource } }); + + // Verify operation completed successfully + Assert.IsNotNull(operation, "Analysis operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Analysis operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + + // Verify result + AnalyzeResult result = operation.Value; + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result contents should not be null"); + Assert.IsTrue(result.Contents.Count > 0, "Result should contain at least one content element"); + Assert.AreEqual(1, result.Contents.Count, "PDF file should have exactly one content element"); + + // Verify markdown content + MediaContent? content = result.Contents.First(); + Assert.IsNotNull(content, "Content should not be null"); + Assert.IsInstanceOf(content, "Content should be of type MediaContent"); + + if (content is MediaContent mediaContent) + { + Assert.IsNotNull(mediaContent.Markdown, "Markdown content should not be null"); + Assert.IsTrue(mediaContent.Markdown.Length > 0, "Markdown content should not be empty"); + } + } + + /// + /// Tests analyzing an invoice using the prebuilt-invoice analyzer and extracting invoice fields. + /// Verifies that invoice-specific fields (CustomerName, InvoiceDate, TotalAmount, LineItems) are extracted correctly. 
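Field extraction with `prebuilt-invoice`, which the following test pins to recorded values, comes down to pattern-matching the typed `ContentField` subclasses and using the `ObjectField` indexer added earlier in this diff. A sketch over an `AnalyzeResult` obtained from an `AnalyzeAsync` call with `prebuilt-invoice` (named `invoiceResult` here; the field names follow the prebuilt invoice schema used by the test):

```C#
using System;
using Azure.AI.ContentUnderstanding;

if (invoiceResult.Contents[0] is DocumentContent invoice)
{
    if (invoice.Fields.TryGetValue("CustomerName", out var name) && name is StringField nameField)
    {
        Console.WriteLine($"Customer: {nameField.ValueString} (confidence {nameField.Confidence})");
    }

    if (invoice.Fields.TryGetValue("InvoiceDate", out var date) && date is DateField dateField)
    {
        Console.WriteLine($"Date: {dateField.ValueDate?.ToString("yyyy-MM-dd")}");
    }

    // ObjectField exposes sub-fields by name via the indexer (null when the sub-field is absent).
    if (invoice.Fields.TryGetValue("TotalAmount", out var total) && total is ObjectField totalField &&
        totalField["Amount"] is NumberField amount)
    {
        Console.WriteLine($"Total: {amount.ValueNumber} {(totalField["CurrencyCode"] as StringField)?.ValueString}");
    }
}
```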
+ /// + [RecordedTest] + public async Task AnalyzeInvoiceAsync() + { + ContentUnderstandingClient client = GetClient(); + + // Get test file URI + Uri invoiceUrl = ContentUnderstandingClientTestEnvironment.CreateUri("invoice.pdf"); + Assert.IsNotNull(invoiceUrl, "Invoice URL should not be null"); + Assert.IsTrue(invoiceUrl.IsAbsoluteUri, "Invoice URL should be absolute"); + + // Analyze the invoice + Operation operation = await client.AnalyzeAsync( + WaitUntil.Completed, + "prebuilt-invoice", + inputs: new[] { new AnalyzeInput { Url = invoiceUrl } }); + + // Verify operation completed successfully + Assert.IsNotNull(operation, "Analysis operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Analysis operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + + // Verify result + AnalyzeResult result = operation.Value; + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents!.Count > 0, "Result should have at least one content"); + Assert.AreEqual(1, result.Contents.Count, "Invoice should have exactly one content element"); + + // Verify document content + var content = result.Contents?.FirstOrDefault(); + Assert.IsNotNull(content, "Content should not be null"); + Assert.IsInstanceOf(content, "Content should be of type DocumentContent"); + + if (content is DocumentContent docContent) + { + // Verify basic document properties + Assert.IsTrue(docContent.StartPageNumber >= 1, "Start page should be >= 1"); + Assert.IsTrue(docContent.EndPageNumber >= docContent.StartPageNumber, + "End page should be >= start page"); + + // Verify invoice fields exist (at least one should be present) + bool hasAnyField = docContent.Fields.ContainsKey("CustomerName") || + docContent.Fields.ContainsKey("InvoiceDate") || + docContent.Fields.ContainsKey("TotalAmount") || + docContent.Fields.ContainsKey("LineItems"); + + Assert.IsTrue(hasAnyField, "Invoice should have at least one standard invoice field"); + + // Verify CustomerName field with expected value + if (docContent.Fields.TryGetValue("CustomerName", out var customerNameField)) + { + Assert.IsTrue(customerNameField is StringField, "CustomerName should be a StringField"); + if (customerNameField is StringField customerNameStr) + { + Assert.IsFalse(string.IsNullOrWhiteSpace(customerNameStr.ValueString), + "CustomerName value should not be empty"); + // Expected value from recording: "MICROSOFT CORPORATION" + Assert.AreEqual("MICROSOFT CORPORATION", customerNameStr.ValueString, + "CustomerName should match expected value"); + Assert.IsTrue(customerNameStr.Confidence.HasValue, + "CustomerName should have confidence value"); + if (customerNameStr.Confidence.HasValue) + { + Assert.IsTrue(customerNameStr.Confidence.Value >= 0 && customerNameStr.Confidence.Value <= 1, + "CustomerName confidence should be between 0 and 1"); + } + } + } + + // Verify InvoiceDate field with expected value + if (docContent.Fields.TryGetValue("InvoiceDate", out var invoiceDateField)) + { + Assert.IsTrue(invoiceDateField is DateField, "InvoiceDate should be a DateField"); + if (invoiceDateField is DateField invoiceDate) + { + 
Assert.IsTrue(invoiceDate.ValueDate.HasValue, + "InvoiceDate should have a date value"); + // Expected value from recording: "2019-11-15" + var expectedDate = new DateTime(2019, 11, 15); + Assert.AreEqual(expectedDate, invoiceDate.ValueDate!.Value.Date, + "InvoiceDate should match expected value"); + Assert.IsTrue(invoiceDate.Confidence.HasValue, + "InvoiceDate should have confidence value"); + if (invoiceDate.Confidence.HasValue) + { + Assert.IsTrue(invoiceDate.Confidence.Value >= 0 && invoiceDate.Confidence.Value <= 1, + "InvoiceDate confidence should be between 0 and 1"); + } + } + } + + // Verify TotalAmount field with expected value + if (docContent.Fields.TryGetValue("TotalAmount", out var totalAmountField)) + { + Assert.IsTrue(totalAmountField is ObjectField, "TotalAmount should be an ObjectField"); + if (totalAmountField is ObjectField totalAmountObj) + { + // Verify Amount sub-field + var amountField = totalAmountObj["Amount"]; + Assert.IsNotNull(amountField, "TotalAmount.Amount should not be null"); + Assert.IsTrue(amountField is NumberField, "TotalAmount.Amount should be a NumberField"); + if (amountField is NumberField amountNum) + { + Assert.IsTrue(amountNum.ValueNumber.HasValue, + "TotalAmount.Amount should have a numeric value"); + // Expected value from recording: 110 + Assert.AreEqual(110.0, amountNum.ValueNumber!.Value, + "TotalAmount.Amount should match expected value"); + } + + // Verify CurrencyCode sub-field + var currencyField = totalAmountObj["CurrencyCode"]; + Assert.IsNotNull(currencyField, "TotalAmount.CurrencyCode should not be null"); + Assert.IsTrue(currencyField is StringField, "TotalAmount.CurrencyCode should be a StringField"); + if (currencyField is StringField currencyStr) + { + // Expected value from recording: "USD" + Assert.AreEqual("USD", currencyStr.ValueString, + "TotalAmount.CurrencyCode should match expected value"); + } + } + } + + // Verify LineItems field with expected values + if (docContent.Fields.TryGetValue("LineItems", out var lineItemsField)) + { + Assert.IsTrue(lineItemsField is ArrayField, "LineItems should be an ArrayField"); + if (lineItemsField is ArrayField lineItems) + { + // Expected count from recording: 3 + Assert.AreEqual(3, lineItems.Count, + "LineItems should have expected count"); + + // Verify first line item (Consulting Services) + if (lineItems[0] is ObjectField item1) + { + var desc1 = item1["Description"]; + Assert.IsNotNull(desc1, "Item 1 Description should not be null"); + if (desc1 is StringField desc1Str) + { + // Expected value from recording: "Consulting Services" + Assert.AreEqual("Consulting Services", desc1Str.ValueString, + "Item 1 Description should match expected value"); + } + + var qty1 = item1["Quantity"]; + Assert.IsNotNull(qty1, "Item 1 Quantity should not be null"); + if (qty1 is NumberField qty1Num && qty1Num.ValueNumber.HasValue) + { + // Expected value from recording: 2 + Assert.AreEqual(2.0, qty1Num.ValueNumber.Value, + "Item 1 Quantity should match expected value"); + } + + var unitPrice1 = item1["UnitPrice"]; + if (unitPrice1 is ObjectField unitPrice1Obj) + { + var unitPrice1Amount = unitPrice1Obj["Amount"]; + if (unitPrice1Amount is NumberField unitPrice1Num && unitPrice1Num.ValueNumber.HasValue) + { + // Expected value from recording: 30 + Assert.AreEqual(30.0, unitPrice1Num.ValueNumber.Value, + "Item 1 UnitPrice.Amount should match expected value"); + } + } + } + + // Verify second line item (Document Fee) + if (lineItems[1] is ObjectField item2) + { + var desc2 = item2["Description"]; + 
Assert.IsNotNull(desc2, "Item 2 Description should not be null"); + if (desc2 is StringField desc2Str) + { + // Expected value from recording: "Document Fee" + Assert.AreEqual("Document Fee", desc2Str.ValueString, + "Item 2 Description should match expected value"); + } + + var qty2 = item2["Quantity"]; + Assert.IsNotNull(qty2, "Item 2 Quantity should not be null"); + if (qty2 is NumberField qty2Num && qty2Num.ValueNumber.HasValue) + { + // Expected value from recording: 3 + Assert.AreEqual(3.0, qty2Num.ValueNumber.Value, + "Item 2 Quantity should match expected value"); + } + + var totalAmount2 = item2["TotalAmount"]; + if (totalAmount2 is ObjectField totalAmount2Obj) + { + var totalAmount2Amount = totalAmount2Obj["Amount"]; + if (totalAmount2Amount is NumberField totalAmount2Num && totalAmount2Num.ValueNumber.HasValue) + { + // Expected value from recording: 30 + Assert.AreEqual(30.0, totalAmount2Num.ValueNumber.Value, + "Item 2 TotalAmount.Amount should match expected value"); + } + } + } + + // Verify third line item (Printing Fee) + if (lineItems[2] is ObjectField item3) + { + var desc3 = item3["Description"]; + Assert.IsNotNull(desc3, "Item 3 Description should not be null"); + if (desc3 is StringField desc3Str) + { + // Expected value from recording: "Printing Fee" + Assert.AreEqual("Printing Fee", desc3Str.ValueString, + "Item 3 Description should match expected value"); + } + + var qty3 = item3["Quantity"]; + Assert.IsNotNull(qty3, "Item 3 Quantity should not be null"); + if (qty3 is NumberField qty3Num && qty3Num.ValueNumber.HasValue) + { + // Expected value from recording: 10 + Assert.AreEqual(10.0, qty3Num.ValueNumber.Value, + "Item 3 Quantity should match expected value"); + } + + var unitPrice3 = item3["UnitPrice"]; + if (unitPrice3 is ObjectField unitPrice3Obj) + { + var unitPrice3Amount = unitPrice3Obj["Amount"]; + if (unitPrice3Amount is NumberField unitPrice3Num && unitPrice3Num.ValueNumber.HasValue) + { + // Expected value from recording: 1 + Assert.AreEqual(1.0, unitPrice3Num.ValueNumber.Value, + "Item 3 UnitPrice.Amount should match expected value"); + } + } + + var totalAmount3 = item3["TotalAmount"]; + if (totalAmount3 is ObjectField totalAmount3Obj) + { + var totalAmount3Amount = totalAmount3Obj["Amount"]; + if (totalAmount3Amount is NumberField totalAmount3Num && totalAmount3Num.ValueNumber.HasValue) + { + // Expected value from recording: 10 + Assert.AreEqual(10.0, totalAmount3Num.ValueNumber.Value, + "Item 3 TotalAmount.Amount should match expected value"); + } + } + } + } + } + } + else + { + Assert.Fail("Content should be DocumentContent for invoice analysis"); + } + } + + /// + /// Tests creating a custom analyzer with field schema. + /// Verifies that the analyzer is created successfully with the specified configuration and fields. + /// + [RecordedTest] + public async Task CreateAnalyzerAsync() + { + ContentUnderstandingClient client = GetClient(); + + // Generate a unique analyzer ID + string defaultId = $"test_custom_analyzer_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("analyzerId", defaultId) ?? 
defaultId; + + // Define field schema with custom fields + var fieldSchema = new ContentFieldSchema( + new Dictionary + { + ["company_name"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Extract, + Description = "Name of the company" + }, + ["total_amount"] = new ContentFieldDefinition + { + Type = ContentFieldType.Number, + Method = GenerationMethod.Extract, + Description = "Total amount on the document" + } + }) + { + Name = "test_schema", + Description = "Test schema for custom analyzer" + }; + + // Create analyzer configuration + var config = new ContentAnalyzerConfig + { + EnableFormula = true, + EnableLayout = true, + EnableOcr = true, + ReturnDetails = true + }; + + // Create the custom analyzer + var customAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Test custom analyzer", + Config = config, + FieldSchema = fieldSchema + }; + + // Add model mappings (required for custom analyzers) + customAnalyzer.Models.Add("completion", "gpt-4.1"); + customAnalyzer.Models.Add("embedding", "text-embedding-3-large"); + + // Create the analyzer + var operation = await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + customAnalyzer, + allowReplace: true); + + // Verify operation completed successfully + Assert.IsNotNull(operation, "Create analyzer operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Create analyzer operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + + // Verify result + ContentAnalyzer result = operation.Value; + Assert.IsNotNull(result, "Analyzer result should not be null"); + Assert.IsNotNull(result.BaseAnalyzerId, "Base analyzer ID should not be null"); + Assert.AreEqual("prebuilt-document", result.BaseAnalyzerId, "Base analyzer ID should match"); + Assert.IsNotNull(result.Config, "Analyzer config should not be null"); + Assert.IsNotNull(result.FieldSchema, "Field schema should not be null"); + Assert.AreEqual(2, result.FieldSchema.Fields.Count, "Should have 2 custom fields"); + Assert.IsTrue(result.FieldSchema.Fields.ContainsKey("company_name"), "Should contain company_name field"); + Assert.IsTrue(result.FieldSchema.Fields.ContainsKey("total_amount"), "Should contain total_amount field"); + + // Clean up: delete the analyzer + try + { + await client.DeleteAnalyzerAsync(analyzerId); + } + catch + { + // Ignore cleanup errors in tests + } + } + + /// + /// Tests creating a classifier with content categories. + /// Verifies that the classifier is created successfully with the specified categories and configuration. + /// + [RecordedTest] + public async Task CreateClassifierAsync() + { + ContentUnderstandingClient client = GetClient(); + + // Generate a unique analyzer ID + string defaultId = $"test_classifier_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("analyzerId", defaultId) ?? 
defaultId; + + // Define content categories for classification + var categories = new Dictionary + { + ["Loan_Application"] = new ContentCategory + { + Description = "Documents submitted by individuals or businesses to request funding" + }, + ["Invoice"] = new ContentCategory + { + Description = "Billing documents issued by sellers or service providers to request payment" + }, + ["Bank_Statement"] = new ContentCategory + { + Description = "Official statements issued by banks that summarize account activity" + } + }; + + // Create analyzer configuration + var config = new ContentAnalyzerConfig + { + ReturnDetails = true, + EnableSegment = true + }; + + // Add categories to config + foreach (var kvp in categories) + { + config.ContentCategories.Add(kvp.Key, kvp.Value); + } + + // Create the classifier analyzer + var classifier = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Custom classifier for financial document categorization", + Config = config + }; + classifier.Models.Add("completion", "gpt-4.1"); + + // Create the classifier + var operation = await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + classifier); + + // Verify operation completed successfully + Assert.IsNotNull(operation, "Create classifier operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Create classifier operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + + // Verify result + ContentAnalyzer result = operation.Value; + Assert.IsNotNull(result, "Classifier result should not be null"); + Assert.IsNotNull(result.BaseAnalyzerId, "Base analyzer ID should not be null"); + Assert.AreEqual("prebuilt-document", result.BaseAnalyzerId, "Base analyzer ID should match"); + Assert.IsNotNull(result.Config, "Classifier config should not be null"); + Assert.IsNotNull(result.Config.ContentCategories, "Content categories should not be null"); + Assert.AreEqual(3, result.Config.ContentCategories.Count, "Should have 3 content categories"); + Assert.IsTrue(result.Config.ContentCategories.ContainsKey("Loan_Application"), "Should contain Loan_Application category"); + Assert.IsTrue(result.Config.ContentCategories.ContainsKey("Invoice"), "Should contain Invoice category"); + Assert.IsTrue(result.Config.ContentCategories.ContainsKey("Bank_Statement"), "Should contain Bank_Statement category"); + + try + { + // Analyze mixed financial document with segmentation enabled + string filePath = ContentUnderstandingClientTestEnvironment.CreatePath("mixed_financial_docs.pdf"); + Assert.IsTrue(File.Exists(filePath), $"Sample file should exist at {filePath}"); + + byte[] fileBytes = File.ReadAllBytes(filePath); + Assert.IsTrue(fileBytes.Length > 0, "File should not be empty"); + + BinaryData binaryData = BinaryData.FromBytes(fileBytes); + + // Analyze the document using the classifier + AnalyzeResultOperation analyzeOperation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + analyzerId, + "application/pdf", + binaryData); + + // Verify analysis operation completed successfully + Assert.IsNotNull(analyzeOperation, "Analysis operation should not be null"); + Assert.IsTrue(analyzeOperation.HasCompleted, "Operation should be completed"); + 
Assert.IsTrue(analyzeOperation.HasValue, "Operation should have a value"); + Assert.IsNotNull(analyzeOperation.GetRawResponse(), "Analysis operation should have a raw response"); + Assert.IsTrue(analyzeOperation.GetRawResponse().Status >= 200 && analyzeOperation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {analyzeOperation.GetRawResponse().Status}"); + + // Verify analysis result + AnalyzeResult analyzeResult = analyzeOperation.Value; + Assert.IsNotNull(analyzeResult, "Analysis result should not be null"); + Assert.IsNotNull(analyzeResult.Contents, "Result should contain contents"); + Assert.IsTrue(analyzeResult.Contents.Count > 0, "Result should have at least one content"); + Assert.AreEqual(1, analyzeResult.Contents.Count, "Result should have exactly one content element"); + + // Verify document content and segments + var documentContent = analyzeResult.Contents?.FirstOrDefault() as DocumentContent; + Assert.IsNotNull(documentContent, "Content should be DocumentContent"); + Assert.IsTrue(documentContent!.StartPageNumber >= 1, "Start page should be >= 1"); + Assert.IsTrue(documentContent.EndPageNumber >= documentContent.StartPageNumber, + "End page should be >= start page"); + + // With EnableSegment=true, we expect automatic segmentation into 3 sections + Assert.IsNotNull(documentContent.Segments, "Segments should not be null when EnableSegment=true"); + Assert.IsTrue(documentContent.Segments!.Count > 0, "Should have at least one segment with EnableSegment=true"); + // Expected: 3 segments (one for each category: Loan_Application, Invoice, Bank_Statement) + Assert.AreEqual(3, documentContent.Segments.Count, + "Mixed financial document should be segmented into 3 sections (one per category)"); + + // Verify each segment with expected values from recording + var sortedSegments = documentContent.Segments.OrderBy(s => s.StartPageNumber).ToList(); + + // Expected segment values from recording: + // Segment 1: Invoice, Pages 1-1, segmentId: segment1 + // Segment 2: Bank_Statement, Pages 2-3, segmentId: segment2 + // Segment 3: Loan_Application, Pages 4-4, segmentId: segment3 + var expectedSegments = new[] + { + new { Category = "Invoice", StartPage = 1, EndPage = 1, SegmentId = "segment1" }, + new { Category = "Bank_Statement", StartPage = 2, EndPage = 3, SegmentId = "segment2" }, + new { Category = "Loan_Application", StartPage = 4, EndPage = 4, SegmentId = "segment3" } + }; + + for (int i = 0; i < sortedSegments.Count; i++) + { + var segment = sortedSegments[i]; + Assert.IsNotNull(segment, $"Segment {i + 1} should not be null"); + Assert.IsTrue(segment.StartPageNumber >= 1, + $"Segment {i + 1} start page should be >= 1, but was {segment.StartPageNumber}"); + Assert.IsTrue(segment.EndPageNumber >= segment.StartPageNumber, + $"Segment {i + 1} end page should be >= start page"); + Assert.IsTrue(segment.StartPageNumber >= documentContent.StartPageNumber && + segment.EndPageNumber <= documentContent.EndPageNumber, + $"Segment {i + 1} page range [{segment.StartPageNumber}, {segment.EndPageNumber}] should be within document range [{documentContent.StartPageNumber}, {documentContent.EndPageNumber}]"); + + // Verify expected values from recording + if (i < expectedSegments.Length) + { + var expected = expectedSegments[i]; + + // Verify category matches expected value + Assert.AreEqual(expected.Category, segment.Category, + $"Segment {i + 1} category should match expected value"); + + // Verify page numbers match expected values + Assert.AreEqual(expected.StartPage, 
segment.StartPageNumber, + $"Segment {i + 1} start page should match expected value"); + Assert.AreEqual(expected.EndPage, segment.EndPageNumber, + $"Segment {i + 1} end page should match expected value"); + + // Verify segment ID matches expected value + if (!string.IsNullOrEmpty(segment.SegmentId)) + { + Assert.AreEqual(expected.SegmentId, segment.SegmentId, + $"Segment {i + 1} ID should match expected value"); + } + } + } + + // Verify segments cover the entire document without gaps + var minSegmentPage = sortedSegments.Min(s => s.StartPageNumber); + var maxSegmentPage = sortedSegments.Max(s => s.EndPageNumber); + Assert.IsTrue(minSegmentPage <= documentContent.StartPageNumber, + "Segments should start at or before document start page"); + Assert.IsTrue(maxSegmentPage >= documentContent.EndPageNumber, + "Segments should end at or after document end page"); + } + finally + { + // Clean up: delete the classifier + try + { + await client.DeleteAnalyzerAsync(analyzerId); + } + catch + { + // Ignore cleanup errors in tests + } + } + } + + /// + /// Tests retrieving analyzer information for both prebuilt and custom analyzers. + /// Verifies that analyzer details are returned correctly. + /// + [RecordedTest] + public async Task GetAnalyzerAsync() + { + ContentUnderstandingClient client = GetClient(); + + // Test getting a prebuilt analyzer + var prebuiltResponse = await client.GetAnalyzerAsync("prebuilt-documentSearch"); + Assert.IsNotNull(prebuiltResponse, "Response should not be null"); + Assert.IsTrue(prebuiltResponse.HasValue, "Response should have a value"); + Assert.IsNotNull(prebuiltResponse.Value, "Analyzer should not be null"); + + ContentAnalyzer prebuiltAnalyzer = prebuiltResponse.Value; + Assert.IsNotNull(prebuiltAnalyzer, "Prebuilt analyzer should not be null"); + + // Verify raw response + var rawResponse = prebuiltResponse.GetRawResponse(); + Assert.IsNotNull(rawResponse, "Raw response should not be null"); + Assert.AreEqual(200, rawResponse.Status, "Response status should be 200"); + + // Test getting prebuilt-invoice analyzer (should have field schema) + var invoiceResponse = await client.GetAnalyzerAsync("prebuilt-invoice"); + Assert.IsNotNull(invoiceResponse, "Invoice response should not be null"); + Assert.IsTrue(invoiceResponse.HasValue, "Invoice response should have a value"); + Assert.IsNotNull(invoiceResponse.Value, "Invoice analyzer should not be null"); + + ContentAnalyzer invoiceAnalyzer = invoiceResponse.Value; + Assert.IsNotNull(invoiceAnalyzer.FieldSchema, "Invoice analyzer should have field schema"); + Assert.IsNotNull(invoiceAnalyzer.FieldSchema!.Fields, "Invoice analyzer should have fields"); + Assert.IsTrue(invoiceAnalyzer.FieldSchema.Fields.Count > 0, + "Invoice analyzer should have at least one field"); + } + + /// + /// Tests listing all analyzers. + /// Verifies that the list includes prebuilt analyzers and optionally custom analyzers. 
+ /// + [RecordedTest] + public async Task ListAnalyzersAsync() + { + ContentUnderstandingClient client = GetClient(); + + // List all analyzers + var analyzers = new List(); + await foreach (var analyzer in client.GetAnalyzersAsync()) + { + analyzers.Add(analyzer); + } + + // Verify we got analyzers + Assert.IsNotNull(analyzers, "Analyzers list should not be null"); + Assert.IsTrue(analyzers.Count > 0, "Should have at least one analyzer"); + + // Verify counts + var prebuiltCount = analyzers.Count(a => a.AnalyzerId?.StartsWith("prebuilt-") == true); + var customCount = analyzers.Count(a => a.AnalyzerId?.StartsWith("prebuilt-") != true); + Assert.IsTrue(prebuiltCount > 0, "Should have at least one prebuilt analyzer"); + Assert.AreEqual(analyzers.Count, prebuiltCount + customCount, + "Total count should equal prebuilt + custom count"); + + // Verify each analyzer has required properties + foreach (var analyzer in analyzers) + { + Assert.IsNotNull(analyzer, "Analyzer should not be null"); + Assert.IsNotNull(analyzer.AnalyzerId, "Analyzer ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(analyzer.AnalyzerId), + $"Analyzer ID should not be empty or whitespace"); + } + + // Verify common prebuilt analyzers exist + var analyzerIds = analyzers.Select(a => a.AnalyzerId).Where(id => id != null).ToList(); + var commonPrebuiltAnalyzers = new[] + { + "prebuilt-document", + "prebuilt-documentSearch", + "prebuilt-invoice" + }; + + foreach (var prebuiltId in commonPrebuiltAnalyzers) + { + Assert.IsTrue(analyzerIds.Contains(prebuiltId), + $"Should contain common prebuilt analyzer: {prebuiltId}"); + } + + // Verify no duplicate analyzer IDs + var duplicateIds = analyzerIds + .GroupBy(id => id) + .Where(g => g.Count() > 1) + .Select(g => g.Key) + .ToList(); + + Assert.AreEqual(0, duplicateIds.Count, + $"Should not have duplicate analyzer IDs: {string.Join(", ", duplicateIds)}"); + } + + /// + /// Tests updating an analyzer's description and tags. + /// Verifies that the analyzer can be updated successfully and changes are persisted. + /// + [RecordedTest] + public async Task UpdateAnalyzerAsync() + { + ContentUnderstandingClient client = GetClient(); + + // First create an analyzer to update + string defaultId = $"test_analyzer_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("updateAnalyzerId", defaultId) ?? 
defaultId; + + var initialAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Initial description", + Config = new ContentAnalyzerConfig + { + ReturnDetails = true + } + }; + initialAnalyzer.Models.Add("completion", "gpt-4.1"); + initialAnalyzer.Tags["tag1"] = "tag1_initial_value"; + initialAnalyzer.Tags["tag2"] = "tag2_initial_value"; + + await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + initialAnalyzer, + allowReplace: true); + + try + { + // Get the current analyzer to preserve base analyzer ID + var currentAnalyzer = await client.GetAnalyzerAsync(analyzerId); + Assert.IsNotNull(currentAnalyzer, "Current analyzer should not be null"); + Assert.IsTrue(currentAnalyzer.HasValue, "Current analyzer should have a value"); + + // Create an updated analyzer with new description and tags + var updatedAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = currentAnalyzer.Value.BaseAnalyzerId, + Description = "Updated description" + }; + + // Update tags (empty string sets tag to empty, doesn't remove it) + updatedAnalyzer.Tags["tag1"] = "tag1_updated_value"; + updatedAnalyzer.Tags["tag2"] = ""; // Set tag2 to empty string + updatedAnalyzer.Tags["tag3"] = "tag3_value"; // Add tag3 + + // Update the analyzer + await client.UpdateAnalyzerAsync(analyzerId, updatedAnalyzer); + + // Verify the update + var updated = await client.GetAnalyzerAsync(analyzerId); + Assert.IsNotNull(updated, "Updated analyzer should not be null"); + Assert.IsTrue(updated.HasValue, "Updated analyzer should have a value"); + Assert.AreEqual("Updated description", updated.Value.Description, + "Description should be updated"); + Assert.IsTrue(updated.Value.Tags.ContainsKey("tag1"), "tag1 should exist"); + Assert.AreEqual("tag1_updated_value", updated.Value.Tags["tag1"], + "tag1 should have updated value"); + // Note: Setting tag to empty string doesn't remove it, just sets it to empty + Assert.IsTrue(updated.Value.Tags.ContainsKey("tag2"), + "tag2 should still exist (empty string doesn't remove tags)"); + Assert.AreEqual("", updated.Value.Tags["tag2"], + "tag2 should have empty string value"); + Assert.IsTrue(updated.Value.Tags.ContainsKey("tag3"), "tag3 should exist"); + Assert.AreEqual("tag3_value", updated.Value.Tags["tag3"], + "tag3 should have correct value"); + Assert.AreEqual(3, updated.Value.Tags.Count, + "Should have 3 tags after update (tag1 updated, tag2 set to empty, tag3 added)"); + } + finally + { + // Clean up + try + { + await client.DeleteAnalyzerAsync(analyzerId); + } + catch + { + // Ignore cleanup errors + } + } + } + + /// + /// Tests deleting an analyzer. + /// Verifies that an analyzer can be deleted successfully. + /// + [RecordedTest] + public async Task DeleteAnalyzerAsync() + { + ContentUnderstandingClient client = GetClient(); + + // First create an analyzer to delete + string defaultId = $"test_analyzer_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("deleteAnalyzerId", defaultId) ?? 
defaultId; + + var analyzer = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Simple analyzer for deletion example", + Config = new ContentAnalyzerConfig + { + ReturnDetails = true + } + }; + analyzer.Models.Add("completion", "gpt-4.1"); + + await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + analyzer, + allowReplace: true); + + // Verify the analyzer was created + var getResponse = await client.GetAnalyzerAsync(analyzerId); + Assert.IsNotNull(getResponse, "Get analyzer response should not be null"); + Assert.IsTrue(getResponse.HasValue, "Get analyzer response should have a value"); + + // Delete the analyzer + await client.DeleteAnalyzerAsync(analyzerId); + + // Verify the analyzer was deleted (should throw 404 or similar) + try + { + var deletedResponse = await client.GetAnalyzerAsync(analyzerId); + // If we get here, the analyzer still exists (unexpected) + Assert.Fail("Analyzer should have been deleted, but GetAnalyzerAsync succeeded"); + } + catch (RequestFailedException ex) when (ex.Status == 404) + { + // Expected: analyzer not found after deletion + Assert.Pass("Analyzer was successfully deleted (404 as expected)"); + } + } + + /// + /// Tests analyzing a document with specific configurations enabled (formulas, layout, OCR). + /// Verifies that document features like charts, annotations, and formulas can be extracted. + /// + [RecordedTest] + public async Task AnalyzeConfigsAsync() + { + ContentUnderstandingClient client = GetClient(); + + // Get test file path + string filePath = ContentUnderstandingClientTestEnvironment.CreatePath("sample_document_features.pdf"); + Assert.IsTrue(File.Exists(filePath), $"Test file should exist at {filePath}"); + + byte[] fileBytes = File.ReadAllBytes(filePath); + Assert.IsTrue(fileBytes.Length > 0, "File should not be empty"); + + BinaryData binaryData = BinaryData.FromBytes(fileBytes); + + // Analyze with prebuilt-documentSearch which has formulas, layout, and OCR enabled + AnalyzeResultOperation operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + binaryData); + + // Verify operation completed successfully + Assert.IsNotNull(operation, "Analysis operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Analysis operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + + // Verify result + AnalyzeResult result = operation.Value; + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents.Count > 0, "Result should have at least one content"); + Assert.AreEqual(1, result.Contents.Count, "PDF file should have exactly one content element"); + + // Verify document content + var documentContent = result.Contents?.FirstOrDefault() as DocumentContent; + Assert.IsNotNull(documentContent, "Content should be DocumentContent"); + Assert.IsTrue(documentContent!.StartPageNumber >= 1, "Start page should be >= 1"); + Assert.IsTrue(documentContent.EndPageNumber >= documentContent.StartPageNumber, + "End page should be >= start page"); + } + + /// + /// Tests analyzing a document and returning raw JSON 
response. + /// Verifies that the raw JSON response can be retrieved and parsed. + /// + [RecordedTest] + public async Task AnalyzeReturnRawJsonAsync() + { + ContentUnderstandingClient client = GetClient(); + + // Get test file path + string filePath = ContentUnderstandingClientTestEnvironment.CreatePath("sample_invoice.pdf"); + Assert.IsTrue(File.Exists(filePath), $"Sample file should exist at {filePath}"); + + byte[] fileBytes = File.ReadAllBytes(filePath); + Assert.IsTrue(fileBytes.Length > 0, "File should not be empty"); + + // Use protocol method to get raw JSON response + var operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + RequestContent.Create(BinaryData.FromBytes(fileBytes))); + + // Verify operation completed successfully + Assert.IsNotNull(operation, "Analysis operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Analysis operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + + // Verify response data + BinaryData responseData = operation.Value; + Assert.IsNotNull(responseData, "Response data should not be null"); + Assert.IsTrue(responseData.ToMemory().Length > 0, "Response data should not be empty"); + + // Verify response is valid JSON + using var jsonDocument = System.Text.Json.JsonDocument.Parse(responseData); + Assert.IsNotNull(jsonDocument, "Response should be valid JSON"); + Assert.IsNotNull(jsonDocument.RootElement, "JSON should have root element"); + } + + /// + /// Tests deleting an analysis result. + /// Verifies that an analysis result can be deleted using its operation ID. 
+ /// + [RecordedTest] + public async Task DeleteResultAsync() + { + ContentUnderstandingClient client = GetClient(); + + // Get test file URI + Uri documentUrl = ContentUnderstandingClientTestEnvironment.CreateUri("invoice.pdf"); + Assert.IsNotNull(documentUrl, "Document URL should not be null"); + Assert.IsTrue(documentUrl.IsAbsoluteUri, "Document URL should be absolute"); + + // Start the analysis operation + var analyzeOperation = await client.AnalyzeAsync( + WaitUntil.Started, + "prebuilt-invoice", + inputs: new[] { new AnalyzeInput { Url = documentUrl } }); + + // Get the operation ID from the operation + string operationId = analyzeOperation.Id; + Assert.IsNotNull(operationId, "Operation ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(operationId), "Operation ID should not be empty"); + + // Wait for completion + await analyzeOperation.WaitForCompletionAsync(); + AnalyzeResult result = analyzeOperation.Value; + + // Verify analysis completed successfully + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents!.Count > 0, "Result should have at least one content"); + + // Delete the analysis result + await client.DeleteResultAsync(operationId); + + // Verify deletion succeeded (no exception means deletion was successful) + // Note: There's no direct way to verify deletion by querying the result, + // but if DeleteResultAsync completes without throwing, the deletion was successful + Assert.Pass("Analysis result deletion completed successfully"); + } + + /// + /// Tests retrieving result files (keyframe images) from video analysis. + /// Verifies that keyframes can be retrieved using GetResultFileAsync. + /// + [RecordedTest] + public async Task GetResultFileAsync() + { + ContentUnderstandingClient client = GetClient(); + + // Use video URL from sample + Uri videoUrl = new Uri("https://github.com/Azure-Samples/azure-ai-content-understanding-assets/raw/refs/heads/main/videos/sdk_samples/FlightSimulator.mp4"); + Assert.IsNotNull(videoUrl, "Video URL should not be null"); + Assert.IsTrue(videoUrl.IsAbsoluteUri, "Video URL should be absolute"); + + // Start the analysis operation + var analyzeOperation = await client.AnalyzeAsync( + WaitUntil.Started, + "prebuilt-videoSearch", + inputs: new[] { new AnalyzeInput { Url = videoUrl } }); + + // Get the operation ID from the operation + string operationId = analyzeOperation.Id; + Assert.IsNotNull(operationId, "Operation ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(operationId), "Operation ID should not be empty"); + + // Wait for completion + await analyzeOperation.WaitForCompletionAsync(); + AnalyzeResult result = analyzeOperation.Value; + + // Verify analysis completed successfully + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents!.Count > 0, "Result should have at least one content"); + + // Find video content with keyframes + var videoContent = result.Contents?.FirstOrDefault(c => c is AudioVisualContent) as AudioVisualContent; + Assert.IsNotNull(videoContent, "Test requires AudioVisualContent (video content) for GetResultFile"); + Assert.IsNotNull(videoContent!.KeyFrameTimesMs, "KeyFrameTimesMs should not be null"); + Assert.IsTrue(videoContent.KeyFrameTimesMs!.Count > 0, + $"Video content should have at least one keyframe, but found 
{videoContent.KeyFrameTimesMs.Count}"); + + // Get the first keyframe + long firstFrameTimeMs = videoContent.KeyFrameTimesMs[0]; + string framePath = $"keyframes/{firstFrameTimeMs}"; + + // Get the result file (keyframe image) + Response fileResponse = await client.GetResultFileAsync(operationId, framePath); + + // Verify response + Assert.IsNotNull(fileResponse, "File response should not be null"); + Assert.IsTrue(fileResponse.HasValue, "File response should have a value"); + Assert.IsNotNull(fileResponse.Value, "File response value should not be null"); + + // Verify raw response + var rawResponse = fileResponse.GetRawResponse(); + Assert.IsNotNull(rawResponse, "Raw response should not be null"); + Assert.IsTrue(rawResponse.Status >= 200 && rawResponse.Status < 300, + $"Response status should be successful, but was {rawResponse.Status}"); + + // Verify file data + byte[] imageBytes = fileResponse.Value.ToArray(); + Assert.IsTrue(imageBytes.Length > 0, "Keyframe image should not be empty"); + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/Infrastructure/ContentUnderstandingClientTestEnvironment.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/Infrastructure/ContentUnderstandingClientTestEnvironment.cs new file mode 100644 index 000000000000..4d41c1befae1 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/Infrastructure/ContentUnderstandingClientTestEnvironment.cs @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable enable + +using System; +using System.IO; +using System.Reflection; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.Core.TestFramework; + +namespace Azure.AI.ContentUnderstanding.Tests +{ + public class ContentUnderstandingClientTestEnvironment : TestEnvironment + { + private const string AssetsFolderName = "samples/SampleFiles"; + + // We are using assets from the Azure-Samples repository. + // Files are located at: https://github.com/Azure-Samples/azure-ai-content-understanding-dotnet/tree/main/ContentUnderstanding.Common/data + private const string FileUriFormat = "https://raw.githubusercontent.com/Azure-Samples/azure-ai-content-understanding-dotnet/main/ContentUnderstanding.Common/data/{0}"; + + private static readonly string s_currentWorkingDirectory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) ?? string.Empty; + + /// + /// Gets the endpoint URL for the Content Understanding service. + /// + /// + /// This value is read from the environment variable: CONTENTUNDERSTANDING_ENDPOINT + /// In Playback mode, a fake endpoint is used: https://fake_contentunderstanding_endpoint.services.ai.azure.com/ + /// The endpoint is sanitized in recordings to prevent exposing real service endpoints. + /// + public string Endpoint => GetRecordedVariable("CONTENTUNDERSTANDING_ENDPOINT", options => options.IsSecret("https://sanitized.services.ai.azure.com/")); + + /// + /// Gets the API key for authenticating with the Content Understanding service. + /// + /// + /// The API key is sanitized in recordings to prevent exposing secrets. + /// + public string ApiKey => GetRecordedOptionalVariable("AZURE_CONTENT_UNDERSTANDING_KEY", options => options.IsSecret()); + + /// + /// Gets the GPT-4.1 deployment name (optional). + /// + public string? Gpt41Deployment => GetRecordedOptionalVariable("GPT_4_1_DEPLOYMENT"); + + /// + /// Gets the GPT-4.1-mini deployment name (optional). 
+ /// + public string? Gpt41MiniDeployment => GetRecordedOptionalVariable("GPT_4_1_MINI_DEPLOYMENT"); + + /// + /// Gets the text-embedding-3-large deployment name (optional). + /// + public string? TextEmbedding3LargeDeployment => GetRecordedOptionalVariable("TEXT_EMBEDDING_3_LARGE_DEPLOYMENT"); + + /// + /// Gets the source resource ID for cross-resource copying (optional). + /// + public string? SourceResourceId => GetRecordedOptionalVariable("SOURCE_RESOURCE_ID", options => options.IsSecret()); + + /// + /// Gets the source region for cross-resource copying (optional). + /// + public string? SourceRegion => GetRecordedOptionalVariable("SOURCE_REGION", options => options.IsSecret()); + + /// + /// Gets the target endpoint for cross-resource copying (optional). + /// + public string TargetEndpoint => GetRecordedVariable("TARGET_ENDPOINT", options => options.IsSecret("https://sanitized.services.ai.azure.com/")); + + /// + /// Gets the target resource ID for cross-resource copying (optional). + /// + public string? TargetResourceId => GetRecordedOptionalVariable("TARGET_RESOURCE_ID", options => options.IsSecret()); + + /// + /// Gets the target region for cross-resource copying (optional). + /// + public string? TargetRegion => GetRecordedOptionalVariable("TARGET_REGION", options => options.IsSecret()); + + /// + /// Gets the target API key for cross-resource copying (optional). + /// + public string? TargetKey => GetRecordedOptionalVariable("TARGET_KEY", options => options.IsSecret()); + + /// + /// Creates a file path for a test asset file. + /// + /// The name of the test asset file. + /// The full path to the test asset file. + public static string CreatePath(string filename) + { + return Path.Combine(s_currentWorkingDirectory, AssetsFolderName, filename); + } + + /// + /// Creates a URI for a test asset file hosted on GitHub. + /// + /// The name of the test asset file in the Azure-Samples repository. + /// A URI pointing to the test asset file. + public static Uri CreateUri(string filename) + { + var uriString = string.Format(FileUriFormat, filename); + return new Uri(uriString); + } + + /// + /// Creates BinaryData from a test asset file. + /// + /// The name of the test asset file. + /// BinaryData containing the file contents. + public static BinaryData CreateBinaryData(string filename) + { + var path = CreatePath(filename); + var bytes = File.ReadAllBytes(path); + return BinaryData.FromBytes(bytes); + } + + protected override async ValueTask IsEnvironmentReadyAsync() + { + var endpoint = new Uri(Endpoint); + var credential = Credential; + var client = new ContentUnderstandingClient(endpoint, credential); + + try + { + await client.GetDefaultsAsync(); + } + catch (RequestFailedException e) when (e.Status == 401) + { + return false; + } + + return true; + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/Infrastructure/ContentUnderstandingTestBase.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/Infrastructure/ContentUnderstandingTestBase.cs new file mode 100644 index 000000000000..667f6af26bae --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/Infrastructure/ContentUnderstandingTestBase.cs @@ -0,0 +1,227 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+
+using System;
+using System.Diagnostics;
+using Azure.Core.TestFramework;
+using Azure.Core.TestFramework.Models;
+using NUnit.Framework;
+
+namespace Azure.AI.ContentUnderstanding.Tests
+{
+    ///
+    /// Serves as a base class for tests related to the Content Understanding client, providing common setup,
+    /// configuration, and utility methods for derived test classes.
+    ///
+    /// This class extends RecordedTestBase<ContentUnderstandingClientTestEnvironment> to enable integration testing with
+    /// recorded sessions. It includes functionality for configuring sanitizers to redact sensitive information from
+    /// logs and telemetry, enforcing custom test timeouts, and creating instrumented instances of the ContentUnderstandingClient for testing purposes.
+    public class ContentUnderstandingTestBase : RecordedTestBase<ContentUnderstandingClientTestEnvironment>
+    {
+        ///
+        /// Initializes a new instance of the ContentUnderstandingTestBase class.
+        ///
+        /// A value indicating whether the test should be executed asynchronously: true to enable
+        /// asynchronous execution; otherwise, false.
+        public ContentUnderstandingTestBase(bool isAsync) : base(isAsync)
+        {
+        }
+
+        ///
+        /// Performs teardown operations after each test execution, enforcing a custom timeout limit.
+        ///
+        /// If the debugger is not attached, this method calculates the duration of the test
+        /// execution and throws a TestTimeoutException if the duration exceeds the predefined timeout
+        /// limit of 1200 seconds.
+        /// Thrown if the test execution duration exceeds the custom timeout limit of 1200 seconds.
+        [TearDown]
+        public override void GlobalTimeoutTearDown()
+        {
+            if (Debugger.IsAttached)
+            {
+                return;
+            }
+
+            var duration = DateTime.UtcNow - TestStartTime;
+            var timeout = 1200;
+
+            if (duration > TimeSpan.FromSeconds(timeout))
+            {
+                throw new TestTimeoutException($"Test exceeded custom time limit of {timeout} seconds. Duration: {duration}");
+            }
+        }
+
+        ///
+        /// Initializes a new instance of the ContentUnderstandingTestBase class.
+        ///
+        /// Indicates whether the test should run in asynchronous mode.
+        /// The optional RecordedTestMode to use for the test. If not specified, the default mode is used.
+        public ContentUnderstandingTestBase(bool isAsync, RecordedTestMode? mode = null)
+            : base(isAsync, mode)
+        {
+            ConfigureSanitizers();
+        }
+
+        ///
+        /// Configures sanitizers to redact sensitive information from URIs, request/response bodies, and headers in
+        /// logs or telemetry data.
+        ///
+        /// This method sets up a series of sanitizers to replace sensitive data with sanitized
+        /// values: Replaces service endpoint URIs with a generic sanitized
+        /// URI. Redacts Blob Storage URLs to a sanitized
+        /// format. Sanitizes specific fields in request/response bodies, such
+        /// as containerUrl and fileListPath. Removes sensitive
+        /// headers, including Ocp-Apim-Subscription-Key and Authorization.
+        /// This ensures that sensitive information is not exposed in logs or telemetry data.
+        private void ConfigureSanitizers()
+        {
+            ConfigureCommonSanitizers(this);
+            ConfigureBatchOperationSanitizers(this);
+        }
+
+        ///
+        /// Configures common sanitizers for Content Understanding tests, including endpoint URL sanitization,
+        /// Operation-Location header sanitization, and sensitive header sanitization.
+        ///
+        /// The test base instance to configure sanitizers for.
+        /// This method should be called from test constructors to ensure consistent sanitization
+        /// across all Content Understanding tests.
It configures: + /// + /// URI sanitizer for service endpoint URLs + /// Header regex sanitizer for Operation-Location header + /// Header sanitizers for Ocp-Apim-Subscription-Key and Authorization + /// + /// + public static void ConfigureCommonSanitizers(RecordedTestBase testBase) + { + // Sanitize endpoint URLs in request/response URIs + testBase.UriRegexSanitizers.Add(new UriRegexSanitizer( + regex: @"https://[a-zA-Z0-9\-]+\.services\.ai\.azure\.com" + ) + { + Value = "https://sanitized.services.ai.azure.com" + }); + + // Sanitize endpoint URLs in headers (e.g., Operation-Location header) + testBase.HeaderRegexSanitizers.Add(new HeaderRegexSanitizer("Operation-Location") + { + Regex = @"https://[a-zA-Z0-9\-]+\.services\.ai\.azure\.com", + Value = "https://sanitized.services.ai.azure.com" + }); + + // Sanitize sensitive headers + testBase.SanitizedHeaders.Add("Ocp-Apim-Subscription-Key"); + testBase.SanitizedHeaders.Add("Authorization"); + } + + /// + /// Configures sanitizers specific to batch operations, including Blob Storage URLs and batch-related body fields. + /// + /// The test base instance to configure sanitizers for. + /// This method configures sanitizers for: + /// + /// Blob Storage URLs + /// containerUrl in request/response bodies + /// fileListPath in request/response bodies + /// + /// + public static void ConfigureBatchOperationSanitizers(RecordedTestBase testBase) + { + // Sanitize Blob Storage URLs + testBase.UriRegexSanitizers.Add(new UriRegexSanitizer( + regex: @"https://[a-zA-Z0-9]+\.blob\.core\.windows\.net" + ) + { + Value = "https://sanitized.blob.core.windows.net" + }); + + // Sanitize containerUrl in request/response body + testBase.BodyRegexSanitizers.Add(new BodyRegexSanitizer( + regex: @"""containerUrl""\s*:\s*""[^""]*""" + ) + { + Value = @"""containerUrl"":""https://sanitized.blob.core.windows.net/container""" + }); + + // Sanitize fileListPath in request/response body + testBase.BodyRegexSanitizers.Add(new BodyRegexSanitizer( + regex: @"""fileListPath""\s*:\s*""[^""]*""" + ) + { + Value = @"""fileListPath"":""sanitized/path/files.txt""" + }); + } + + /// + /// Configures sanitizers specific to copy operations, including resource IDs and regions. + /// + /// The test base instance to configure sanitizers for. + /// This method configures sanitizers for: + /// + /// targetAzureResourceId in request/response bodies + /// targetRegion in request/response bodies + /// sourceAzureResourceId in request/response bodies + /// sourceRegion in request/response bodies + /// + /// + public static void ConfigureCopyOperationSanitizers(RecordedTestBase testBase) + { + // Sanitize resource IDs and regions in request bodies (for GrantCopyAuthorization and CopyAnalyzer) + testBase.BodyRegexSanitizers.Add(new BodyRegexSanitizer( + regex: @"""targetAzureResourceId""\s*:\s*""[^""]*""" + ) + { + Value = @"""targetAzureResourceId"":""Sanitized""" + }); + + testBase.BodyRegexSanitizers.Add(new BodyRegexSanitizer( + regex: @"""targetRegion""\s*:\s*""[^""]*""" + ) + { + Value = @"""targetRegion"":""Sanitized""" + }); + + testBase.BodyRegexSanitizers.Add(new BodyRegexSanitizer( + regex: @"""sourceAzureResourceId""\s*:\s*""[^""]*""" + ) + { + Value = @"""sourceAzureResourceId"":""Sanitized""" + }); + + testBase.BodyRegexSanitizers.Add(new BodyRegexSanitizer( + regex: @"""sourceRegion""\s*:\s*""[^""]*""" + ) + { + Value = @"""sourceRegion"":""Sanitized""" + }); + } + + /// + /// Creates and configures an instance of the for interacting with the + /// Content Understanding service. 
+        ///
+        /// This method initializes the client using the endpoint and credentials provided by the
+        /// test environment. If an API key is available, it uses an AzureKeyCredential for
+        /// authentication; otherwise, it falls back to the default credential.
+        /// A fully configured ContentUnderstandingClient instance ready for use.
+        protected ContentUnderstandingClient CreateClient()
+        {
+            var endpoint = new Uri(TestEnvironment.Endpoint);
+            var options = InstrumentClientOptions(new ContentUnderstandingClientOptions());
+
+            string apiKey = TestEnvironment.ApiKey;
+
+            if (!string.IsNullOrWhiteSpace(apiKey))
+            {
+                var keyCredential = new AzureKeyCredential(apiKey);
+                return InstrumentClient(new ContentUnderstandingClient(endpoint, keyCredential, options));
+            }
+            else
+            {
+                var credential = TestEnvironment.Credential;
+                return InstrumentClient(new ContentUnderstandingClient(endpoint, credential, options));
+            }
+        }
+    }
+}
diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/properties/AssemblyInfo.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/properties/AssemblyInfo.cs
new file mode 100644
index 000000000000..911ba6084f3a
--- /dev/null
+++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/properties/AssemblyInfo.cs
@@ -0,0 +1,8 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System.Runtime.CompilerServices;
+using Castle.Core.Internal;
+
+[assembly: InternalsVisibleTo(InternalsVisible.ToDynamicProxyGenAssembly2)]
+[assembly: InternalsVisibleTo("Azure.Core.TestFramework.Tests, PublicKey=0024000004800000940000000602000000240000525341310004000001000100d15ddcb29688295338af4b7686603fe614abd555e09efba8fb88ee09e1f7b1ccaeed2e8f823fa9eef3fdd60217fc012ea67d2479751a0b8c087a4185541b851bd8b16f8d91b840e51b1cb0ba6fe647997e57429265e85ef62d565db50a69ae1647d54d7bd855e4db3d8a91510e5bcbd0edfbbecaa20a7bd9ae74593daa7b11b4")]
diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/ContentUnderstandingSamples.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/ContentUnderstandingSamples.cs
new file mode 100644
index 000000000000..4a4247482f4a
--- /dev/null
+++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/ContentUnderstandingSamples.cs
@@ -0,0 +1,25 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using Azure.AI.ContentUnderstanding.Tests;
+using Azure.Core.TestFramework;
+using Azure.Core.TestFramework.Models;
+
+namespace Azure.AI.ContentUnderstanding.Samples
+{
+    [AsyncOnly] // Ensure that each sample will only run once.
+    public partial class ContentUnderstandingSamples : RecordedTestBase<ContentUnderstandingClientTestEnvironment>
+    {
+        public ContentUnderstandingSamples(bool isAsync) : base(isAsync)
+        {
+            // Disable diagnostic validation for samples (they're for documentation, not full test coverage)
+            TestDiagnostics = false;
+
+            // Configure common sanitizers (endpoint URLs, headers)
+            ContentUnderstandingTestBase.ConfigureCommonSanitizers(this);
+
+            // Configure copy operation sanitizers (resource IDs, regions)
+            ContentUnderstandingTestBase.ConfigureCopyOperationSanitizers(this);
+        }
+    }
+}
diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample00_ConfigureDefaults.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample00_ConfigureDefaults.cs
new file mode 100644
index 000000000000..40a8cda3e9c3
--- /dev/null
+++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample00_ConfigureDefaults.cs
@@ -0,0 +1,99 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+#nullable enable
+
+using System;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using Azure;
+using Azure.AI.ContentUnderstanding;
+using Azure.AI.ContentUnderstanding.Tests;
+using Azure.Core;
+using Azure.Core.TestFramework;
+
+namespace Azure.AI.ContentUnderstanding.Samples
+{
+    public partial class ContentUnderstandingSamples
+    {
+        [RecordedTest]
+        public async Task ConfigureDefaultsAsync()
+        {
+            string endpoint = TestEnvironment.Endpoint;
+            var options = InstrumentClientOptions(new ContentUnderstandingClientOptions());
+            var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options));
+
+            #region Snippet:ContentUnderstandingUpdateDefaults
+#if SNIPPET
+            // Map your deployed models to the models required by prebuilt analyzers
+            var modelDeployments = new Dictionary<string, string>
+            {
+                ["gpt-4.1"] = "",
+                ["gpt-4.1-mini"] = "",
+                ["text-embedding-3-large"] = ""
+            };
+
+            var response = await client.UpdateDefaultsAsync(modelDeployments);
+            ContentUnderstandingDefaults updatedDefaults = response.Value;
+
+            Console.WriteLine("Model deployments configured successfully!");
+            foreach (var kvp in updatedDefaults.ModelDeployments)
+            {
+                Console.WriteLine($"  {kvp.Key} → {kvp.Value}");
+            }
+#else
+            // Only update if we have deployment names configured in environment
+            string? gpt41Deployment = TestEnvironment.Gpt41Deployment;
+            string? gpt41MiniDeployment = TestEnvironment.Gpt41MiniDeployment;
+            string? textEmbeddingDeployment = TestEnvironment.TextEmbedding3LargeDeployment;
+
+            if (!string.IsNullOrEmpty(gpt41Deployment) && !string.IsNullOrEmpty(gpt41MiniDeployment) && !string.IsNullOrEmpty(textEmbeddingDeployment))
+            {
+                var modelDeployments = new Dictionary<string, string>
+                {
+                    ["gpt-4.1"] = gpt41Deployment!,
+                    ["gpt-4.1-mini"] = gpt41MiniDeployment!,
+                    ["text-embedding-3-large"] = textEmbeddingDeployment!
+ }; + + var response = await client.UpdateDefaultsAsync(modelDeployments); + ContentUnderstandingDefaults updatedDefaults = response.Value; + + Console.WriteLine("Model deployments configured successfully!"); + foreach (var kvp in updatedDefaults.ModelDeployments) + { + Console.WriteLine($" {kvp.Key} → {kvp.Value}"); + } + } + else + { + Console.WriteLine("Skipping UpdateDefaults - deployment names not configured in test environment"); + } +#endif + #endregion + + #region Snippet:ContentUnderstandingGetDefaults +#if SNIPPET + var getResponse = await client.GetDefaultsAsync(); + ContentUnderstandingDefaults defaults = getResponse.Value; +#else + var getResponse = await client.GetDefaultsAsync(); + ContentUnderstandingDefaults defaults = getResponse.Value; +#endif + + Console.WriteLine("Current model deployment mappings:"); + if (defaults.ModelDeployments != null && defaults.ModelDeployments.Count > 0) + { + foreach (var kvp in defaults.ModelDeployments) + { + Console.WriteLine($" {kvp.Key} → {kvp.Value}"); + } + } + else + { + Console.WriteLine(" No model deployments configured yet."); + } + #endregion + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample01_AnalyzeBinary.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample01_AnalyzeBinary.cs new file mode 100644 index 000000000000..85fb8ec39e6b --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample01_AnalyzeBinary.cs @@ -0,0 +1,234 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable enable + +using System; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task AnalyzeBinaryAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingAnalyzeBinaryAsync +#if SNIPPET + string filePath = ""; +#else + string filePath = ContentUnderstandingClientTestEnvironment.CreatePath("sample_invoice.pdf"); +#endif + byte[] fileBytes = File.ReadAllBytes(filePath); + BinaryData binaryData = BinaryData.FromBytes(fileBytes); + + AnalyzeResultOperation operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + binaryData); + + AnalyzeResult result = operation.Value; + #endregion + + #region Assertion:ContentUnderstandingAnalyzeBinaryAsync + Assert.IsTrue(File.Exists(filePath), $"Sample file not found at {filePath}"); + Assert.IsTrue(fileBytes.Length > 0, "File should not be empty"); + Assert.IsNotNull(binaryData, "Binary data should not be null"); + Assert.IsNotNull(operation, "Analysis operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsNotNull(operation.GetRawResponse(), "Analysis operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was 
{operation.GetRawResponse().Status}"); + Console.WriteLine("Analysis operation properties verified"); + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result contents should not be null"); + Console.WriteLine($"Analysis result contains {result.Contents?.Count ?? 0} content(s)"); + #endregion + + #region Snippet:ContentUnderstandingExtractMarkdown + // A PDF file has only one content element even if it contains multiple pages + MediaContent? content = null; + if (result.Contents == null || result.Contents.Count == 0) + { + Console.WriteLine("(No content returned from analysis)"); + } + else + { + content = result.Contents.First(); + if (!string.IsNullOrEmpty(content.Markdown)) + { + Console.WriteLine(content.Markdown); + } + else + { + Console.WriteLine("(No markdown content available)"); + } + } + #endregion + + #region Assertion:ContentUnderstandingExtractMarkdown + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents!.Count > 0, "Result should have at least one content"); + Assert.AreEqual(1, result.Contents.Count, "PDF file should have exactly one content element"); + Assert.IsNotNull(content, "Content should not be null"); + Assert.IsInstanceOf(content, "Content should be of type MediaContent"); + if (content is MediaContent mediaContent) + { + Assert.IsNotNull(mediaContent.Markdown, "Markdown content should not be null"); + Assert.IsTrue(mediaContent.Markdown.Length > 0, "Markdown content should not be empty"); + Assert.IsFalse(string.IsNullOrWhiteSpace(mediaContent.Markdown), + "Markdown content should not be just whitespace"); + Console.WriteLine($"Markdown content extracted successfully ({mediaContent.Markdown.Length} characters)"); + } + #endregion + + #region Snippet:ContentUnderstandingAccessDocumentProperties + // Check if this is document content to access document-specific properties + if (content is DocumentContent documentContent) + { + Console.WriteLine($"Document type: {documentContent.MimeType ?? "(unknown)"}"); + Console.WriteLine($"Start page: {documentContent.StartPageNumber}"); + Console.WriteLine($"End page: {documentContent.EndPageNumber}"); + Console.WriteLine($"Total pages: {documentContent.EndPageNumber - documentContent.StartPageNumber + 1}"); + + // Check for pages + if (documentContent.Pages != null && documentContent.Pages.Count > 0) + { + Console.WriteLine($"Number of pages: {documentContent.Pages.Count}"); + foreach (var page in documentContent.Pages) + { + var unit = documentContent.Unit?.ToString() ?? 
"units"; + Console.WriteLine($" Page {page.PageNumber}: {page.Width} x {page.Height} {unit}"); + } + } + + // Check for tables + if (documentContent.Tables != null && documentContent.Tables.Count > 0) + { + Console.WriteLine($"Number of tables: {documentContent.Tables.Count}"); + int tableCounter = 1; + foreach (var table in documentContent.Tables) + { + Console.WriteLine($" Table {tableCounter}: {table.RowCount} rows x {table.ColumnCount} columns"); + tableCounter++; + } + } + } + #endregion + + #region Assertion:ContentUnderstandingAccessDocumentProperties + Assert.IsNotNull(content, "Content should not be null for document properties validation"); + + if (content is DocumentContent docContent) + { + // Validate MIME type + Assert.IsNotNull(docContent.MimeType, "MIME type should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(docContent.MimeType), "MIME type should not be empty"); + Assert.AreEqual("application/pdf", docContent.MimeType, "MIME type should be application/pdf"); + Console.WriteLine($"MIME type verified: {docContent.MimeType}"); + + // Validate page numbers + Assert.IsTrue(docContent.StartPageNumber >= 1, "Start page should be >= 1"); + Assert.IsTrue(docContent.EndPageNumber >= docContent.StartPageNumber, + "End page should be >= start page"); + int totalPages = docContent.EndPageNumber - docContent.StartPageNumber + 1; + Assert.IsTrue(totalPages > 0, "Total pages should be positive"); + Console.WriteLine($"Page range verified: {docContent.StartPageNumber} to {docContent.EndPageNumber} ({totalPages} pages)"); + + // Validate pages collection + if (docContent.Pages != null && docContent.Pages.Count > 0) + { + Assert.IsTrue(docContent.Pages.Count > 0, "Pages collection should not be empty when not null"); + Assert.AreEqual(totalPages, docContent.Pages.Count, + "Pages collection count should match calculated total pages"); + Console.WriteLine($"Pages collection verified: {docContent.Pages.Count} pages"); + + // Track page numbers to ensure they're sequential and unique + var pageNumbers = new System.Collections.Generic.HashSet(); + + foreach (var page in docContent.Pages) + { + Assert.IsNotNull(page, "Page object should not be null"); + Assert.IsTrue(page.PageNumber >= 1, "Page number should be >= 1"); + Assert.IsTrue(page.PageNumber >= docContent.StartPageNumber && + page.PageNumber <= docContent.EndPageNumber, + $"Page number {page.PageNumber} should be within document range [{docContent.StartPageNumber}, {docContent.EndPageNumber}]"); + Assert.IsTrue(page.Width > 0, $"Page {page.PageNumber} width should be > 0, but was {page.Width}"); + Assert.IsTrue(page.Height > 0, $"Page {page.PageNumber} height should be > 0, but was {page.Height}"); + + // Ensure page numbers are unique + Assert.IsTrue(pageNumbers.Add(page.PageNumber), + $"Page number {page.PageNumber} appears multiple times"); + + Console.WriteLine($" Page {page.PageNumber}: {page.Width} x {page.Height} {docContent.Unit?.ToString() ?? 
"units"}"); + } + } + else + { + Console.WriteLine("āš ļø No pages collection available in document content"); + } + + // Validate tables collection + if (docContent.Tables != null && docContent.Tables.Count > 0) + { + Assert.IsTrue(docContent.Tables.Count > 0, "Tables collection should not be empty when not null"); + Console.WriteLine($"Tables collection verified: {docContent.Tables.Count} tables"); + + int tableCounter = 1; + foreach (var table in docContent.Tables) + { + Assert.IsNotNull(table, $"Table {tableCounter} should not be null"); + Assert.IsTrue(table.RowCount > 0, $"Table {tableCounter} should have at least 1 row, but had {table.RowCount}"); + Assert.IsTrue(table.ColumnCount > 0, $"Table {tableCounter} should have at least 1 column, but had {table.ColumnCount}"); + + // Validate table cells if available + if (table.Cells != null) + { + Assert.IsTrue(table.Cells.Count > 0, $"Table {tableCounter} cells collection should not be empty when not null"); + + foreach (var cell in table.Cells) + { + Assert.IsNotNull(cell, "Table cell should not be null"); + Assert.IsTrue(cell.RowIndex >= 0 && cell.RowIndex < table.RowCount, + $"Cell row index {cell.RowIndex} should be within table row count {table.RowCount}"); + Assert.IsTrue(cell.ColumnIndex >= 0 && cell.ColumnIndex < table.ColumnCount, + $"Cell column index {cell.ColumnIndex} should be within table column count {table.ColumnCount}"); + Assert.IsTrue(cell.RowSpan >= 1, $"Cell row span should be >= 1, but was {cell.RowSpan}"); + Assert.IsTrue(cell.ColumnSpan >= 1, $"Cell column span should be >= 1, but was {cell.ColumnSpan}"); + } + } + + Console.WriteLine($" Table {tableCounter}: {table.RowCount} rows x {table.ColumnCount} columns" + + (table.Cells != null ? $" ({table.Cells.Count} cells)" : "")); + tableCounter++; + } + } + else + { + Console.WriteLine("No tables found in document content"); + } + + Console.WriteLine("All document properties validated successfully"); + } + else + { + Console.WriteLine("Content is not DocumentContent type, skipping document-specific validations"); + Assert.Warn("Expected DocumentContent but got " + content?.GetType().Name); + } + #endregion + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample02_AnalyzeUrl.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample02_AnalyzeUrl.cs new file mode 100644 index 000000000000..6b043b5a8d27 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample02_AnalyzeUrl.cs @@ -0,0 +1,218 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +#nullable enable + +using System; +using System.Linq; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task AnalyzeUrlAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingAnalyzeUrlAsync +#if SNIPPET + Uri uriSource = new Uri(""); +#else + Uri uriSource = ContentUnderstandingClientTestEnvironment.CreateUri("invoice.pdf"); +#endif + Operation operation = await client.AnalyzeAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + inputs: new[] { new AnalyzeInput { Url = uriSource } }); + + AnalyzeResult result = operation.Value; + #endregion + + #region Assertion:ContentUnderstandingAnalyzeUrlAsync + Assert.IsNotNull(uriSource, "URI source should not be null"); + Assert.IsTrue(uriSource.IsAbsoluteUri, "URI should be absolute"); + Assert.IsNotNull(operation, "Analysis operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Analysis operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + Console.WriteLine("Analysis operation properties verified"); + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result contents should not be null"); + Console.WriteLine($"Analysis result contains {result.Contents?.Count ?? 0} content(s)"); + #endregion + + // A PDF file has only one content element even if it contains multiple pages + MediaContent? content = null; + if (result.Contents == null || result.Contents.Count == 0) + { + Console.WriteLine("(No content returned from analysis)"); + } + else + { + content = result.Contents.First(); + if (! 
string.IsNullOrEmpty(content.Markdown)) + { + Console.WriteLine(content.Markdown); + } + else + { + Console.WriteLine("(No markdown content available)"); + } + } + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents!.Count > 0, "Result should have at least one content"); + Assert.AreEqual(1, result.Contents.Count, "PDF file should have exactly one content element"); + Assert.IsNotNull(content, "Content should not be null"); + Assert.IsInstanceOf(content, "Content should be of type MediaContent"); + if (content is MediaContent mediaContent) + { + Assert.IsNotNull(mediaContent.Markdown, "Markdown content should not be null"); + Assert.IsTrue(mediaContent.Markdown.Length > 0, "Markdown content should not be empty"); + Assert.IsFalse(string.IsNullOrWhiteSpace(mediaContent.Markdown), + "Markdown content should not be just whitespace"); + Console.WriteLine($"Markdown content extracted successfully ({mediaContent.Markdown.Length} characters)"); + } + + // Check if this is document content to access document-specific properties + if (content is DocumentContent documentContent) + { + Console.WriteLine($"Document type: {documentContent.MimeType ?? "(unknown)"}"); + Console.WriteLine($"Start page: {documentContent.StartPageNumber}"); + Console.WriteLine($"End page: {documentContent.EndPageNumber}"); + Console.WriteLine($"Total pages: {documentContent.EndPageNumber - documentContent.StartPageNumber + 1}"); + + // Check for pages + if (documentContent.Pages != null && documentContent.Pages.Count > 0) + { + Console.WriteLine($"Number of pages: {documentContent.Pages.Count}"); + foreach (var page in documentContent.Pages) + { + var unit = documentContent.Unit?.ToString() ?? "units"; + Console.WriteLine($" Page {page.PageNumber}: {page.Width} x {page.Height} {unit}"); + } + } + + // Check for tables + if (documentContent.Tables != null && documentContent.Tables.Count > 0) + { + Console.WriteLine($"Number of tables: {documentContent.Tables.Count}"); + int tableCounter = 1; + foreach (var table in documentContent.Tables) + { + Console.WriteLine($" Table {tableCounter}: {table.RowCount} rows x {table.ColumnCount} columns"); + tableCounter++; + } + } + } + + Assert.IsNotNull(content, "Content should not be null for document properties validation"); + + if (content is DocumentContent docContent) + { + // Validate MIME type + Assert.IsNotNull(docContent.MimeType, "MIME type should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(docContent.MimeType), "MIME type should not be empty"); + Assert.AreEqual("application/pdf", docContent.MimeType, "MIME type should be application/pdf"); + Console.WriteLine($"MIME type verified: {docContent.MimeType}"); + + // Validate page numbers + Assert.IsTrue(docContent.StartPageNumber >= 1, "Start page should be >= 1"); + Assert.IsTrue(docContent.EndPageNumber >= docContent.StartPageNumber, + "End page should be >= start page"); + int totalPages = docContent.EndPageNumber - docContent.StartPageNumber + 1; + Assert.IsTrue(totalPages > 0, "Total pages should be positive"); + Console.WriteLine($"Page range verified: {docContent.StartPageNumber} to {docContent.EndPageNumber} ({totalPages} pages)"); + + // Validate pages collection + if (docContent.Pages != null && docContent.Pages.Count > 0) + { + Assert.IsTrue(docContent.Pages.Count > 0, "Pages collection should not be empty when not null"); + Assert.AreEqual(totalPages, docContent.Pages.Count, + "Pages collection count should match calculated total pages"); + 
Console.WriteLine($"Pages collection verified: {docContent.Pages.Count} pages"); + + // Track page numbers to ensure they're sequential and unique + var pageNumbers = new System.Collections.Generic.HashSet(); + + foreach (var page in docContent.Pages) + { + Assert.IsNotNull(page, "Page object should not be null"); + Assert.IsTrue(page.PageNumber >= 1, "Page number should be >= 1"); + Assert.IsTrue(page.PageNumber >= docContent.StartPageNumber && + page.PageNumber <= docContent.EndPageNumber, + $"Page number {page.PageNumber} should be within document range [{docContent.StartPageNumber}, {docContent.EndPageNumber}]"); + Assert.IsTrue(page.Width > 0, $"Page {page.PageNumber} width should be > 0, but was {page.Width}"); + Assert.IsTrue(page.Height > 0, $"Page {page.PageNumber} height should be > 0, but was {page.Height}"); + + // Ensure page numbers are unique + Assert.IsTrue(pageNumbers.Add(page.PageNumber), + $"Page number {page.PageNumber} appears multiple times"); + + Console.WriteLine($" Page {page.PageNumber}: {page.Width} x {page.Height} {docContent.Unit?.ToString() ?? "units"}"); + } + } + else + { + Console.WriteLine("āš ļø No pages collection available in document content"); + } + + // Validate tables collection + if (docContent.Tables != null && docContent.Tables.Count > 0) + { + Assert.IsTrue(docContent.Tables.Count > 0, "Tables collection should not be empty when not null"); + Console.WriteLine($"Tables collection verified: {docContent.Tables.Count} tables"); + + int tableCounter = 1; + foreach (var table in docContent.Tables) + { + Assert.IsNotNull(table, $"Table {tableCounter} should not be null"); + Assert.IsTrue(table.RowCount > 0, $"Table {tableCounter} should have at least 1 row, but had {table.RowCount}"); + Assert.IsTrue(table.ColumnCount > 0, $"Table {tableCounter} should have at least 1 column, but had {table.ColumnCount}"); + + // Validate table cells if available + if (table.Cells != null) + { + Assert.IsTrue(table.Cells.Count > 0, $"Table {tableCounter} cells collection should not be empty when not null"); + + foreach (var cell in table.Cells) + { + Assert.IsNotNull(cell, "Table cell should not be null"); + Assert.IsTrue(cell.RowIndex >= 0 && cell.RowIndex < table.RowCount, + $"Cell row index {cell.RowIndex} should be within table row count {table.RowCount}"); + Assert.IsTrue(cell.ColumnIndex >= 0 && cell.ColumnIndex < table.ColumnCount, + $"Cell column index {cell.ColumnIndex} should be within table column count {table.ColumnCount}"); + } + } + + Console.WriteLine($" Table {tableCounter}: {table.RowCount} rows x {table.ColumnCount} columns" + + (table.Cells != null ? $" ({table.Cells.Count} cells)" : "")); + tableCounter++; + } + } + else + { + Console.WriteLine("āš ļø No tables found in document content"); + } + + Console.WriteLine("All document properties validated successfully"); + } + else + { + Console.WriteLine("āš ļø Content is not DocumentContent type, skipping document-specific validations"); + Assert.Warn("Expected DocumentContent but got " + content?.GetType().Name); + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample03_AnalyzeInvoice.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample03_AnalyzeInvoice.cs new file mode 100644 index 000000000000..e08fdcc16996 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample03_AnalyzeInvoice.cs @@ -0,0 +1,370 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +#nullable enable + +using System; +using System.Linq; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task AnalyzeInvoiceAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingAnalyzeInvoice +#if SNIPPET + Uri invoiceUrl = new Uri(""); +#else + Uri invoiceUrl = ContentUnderstandingClientTestEnvironment.CreateUri("invoice.pdf"); +#endif + Operation operation = await client.AnalyzeAsync( + WaitUntil.Completed, + "prebuilt-invoice", + inputs: new[] { new AnalyzeInput { Url = invoiceUrl } }); + + AnalyzeResult result = operation.Value; + #endregion + + #region Assertion:ContentUnderstandingAnalyzeInvoice + Assert.IsNotNull(invoiceUrl, "Invoice URL should not be null"); + Assert.IsTrue(invoiceUrl.IsAbsoluteUri, "Invoice URL should be absolute"); + Assert.IsNotNull(operation, "Analysis operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Analysis operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + Console.WriteLine("Analysis operation properties verified"); + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents!.Count > 0, "Result should have at least one content"); + Assert.AreEqual(1, result.Contents.Count, "Invoice should have exactly one content element"); + Console.WriteLine($"Analysis result contains {result.Contents.Count} content(s)"); + #endregion + + #region Snippet:ContentUnderstandingExtractInvoiceFields + // Get the document content (invoices are documents) + if (result.Contents?.FirstOrDefault() is DocumentContent documentContent) + { + // Print document unit information + // The unit indicates the measurement system used for coordinates in the source field + Console.WriteLine($"Document unit: {documentContent.Unit ?? "unknown"}"); + Console.WriteLine($"Pages: {documentContent.StartPageNumber} to {documentContent.EndPageNumber}"); + Console.WriteLine(); + + // Extract simple string fields + var customerNameField = documentContent["CustomerName"]; + var invoiceDateField = documentContent["InvoiceDate"]; + + var customerName = customerNameField?.Value?.ToString(); + var invoiceDate = invoiceDateField?.Value?.ToString(); + + Console.WriteLine($"Customer Name: {customerName ?? "(None)"}"); + if (customerNameField != null) + { + Console.WriteLine($" Confidence: {customerNameField.Confidence?.ToString("F2") ?? 
"N/A"}"); + // Source is an encoded identifier containing bounding box coordinates + // Format: D(pageNumber, x1, y1, x2, y2, x3, y3, x4, y4) + // Coordinates are in the document's unit (e.g., inches for US documents) + Console.WriteLine($" Source: {customerNameField.Source ?? "N/A"}"); + if (customerNameField.Spans != null && customerNameField.Spans.Count > 0) + { + var span = customerNameField.Spans[0]; + Console.WriteLine($" Position in markdown: offset={span.Offset}, length={span.Length}"); + } + } + + Console.WriteLine($"Invoice Date: {invoiceDate ?? "(None)"}"); + if (invoiceDateField != null) + { + Console.WriteLine($" Confidence: {invoiceDateField.Confidence?.ToString("F2") ?? "N/A"}"); + Console.WriteLine($" Source: {invoiceDateField.Source ?? "N/A"}"); + if (invoiceDateField.Spans != null && invoiceDateField.Spans.Count > 0) + { + var span = invoiceDateField.Spans[0]; + Console.WriteLine($" Position in markdown: offset={span.Offset}, length={span.Length}"); + } + } + + // Extract object fields (nested structures) + if (documentContent["TotalAmount"] is ObjectField totalAmountObj) + { + var amount = totalAmountObj["Amount"]?.Value as double?; + var currency = totalAmountObj["CurrencyCode"]?.Value?.ToString(); + Console.WriteLine($"Total: {currency ?? "$"}{amount?.ToString("F2") ?? "(None)"}"); + if (totalAmountObj.Confidence.HasValue) + { + Console.WriteLine($" Confidence: {totalAmountObj.Confidence.Value:F2}"); + } + if (!string.IsNullOrEmpty(totalAmountObj.Source)) + { + Console.WriteLine($" Source: {totalAmountObj.Source}"); + } + } + + // Extract array fields (collections like line items) + if (documentContent["LineItems"] is ArrayField lineItems) + { + Console.WriteLine($"Line Items ({lineItems.Count}):"); + for (int i = 0; i < lineItems.Count; i++) + { + if (lineItems[i] is ObjectField item) + { + var description = item["Description"]?.Value?.ToString(); + var quantity = item["Quantity"]?.Value as double?; + Console.WriteLine($" Item {i + 1}: {description ?? "N/A"} (Qty: {quantity?.ToString() ?? 
"N/A"})"); + if (item.Confidence.HasValue) + { + Console.WriteLine($" Confidence: {item.Confidence.Value:F2}"); + } + } + } + } + } + #endregion + + #region Assertion:ContentUnderstandingExtractInvoiceFields + var content = result.Contents?.FirstOrDefault(); + Assert.IsNotNull(content, "Content should not be null"); + Assert.IsInstanceOf(content, "Content should be of type DocumentContent"); + + if (content is DocumentContent docContent) + { + // Verify basic document properties + Assert.IsTrue(docContent.StartPageNumber >= 1, "Start page should be >= 1"); + Assert.IsTrue(docContent.EndPageNumber >= docContent.StartPageNumber, + "End page should be >= start page"); + int totalPages = docContent.EndPageNumber - docContent.StartPageNumber + 1; + Assert.IsTrue(totalPages > 0, "Total pages should be positive"); + Console.WriteLine($"Document has {totalPages} page(s) from {docContent.StartPageNumber} to {docContent.EndPageNumber}"); + + // Verify document unit + if (docContent.Unit.HasValue) + { + Console.WriteLine($"Document unit: {docContent.Unit.Value}"); + } + + // Verify CustomerName field + var customerNameField = docContent["CustomerName"]; + if (customerNameField != null) + { + Console.WriteLine($"CustomerName field found"); + + if (customerNameField.Value != null) + { + Assert.IsFalse(string.IsNullOrWhiteSpace(customerNameField.Value.ToString()), + "CustomerName value should not be empty when present"); + Console.WriteLine($" Value: {customerNameField.Value}"); + } + + if (customerNameField.Confidence.HasValue) + { + Assert.IsTrue(customerNameField.Confidence.Value >= 0 && customerNameField.Confidence.Value <= 1, + $"CustomerName confidence should be between 0 and 1, but was {customerNameField.Confidence.Value}"); + Console.WriteLine($" Confidence: {customerNameField.Confidence.Value:F2}"); + } + + if (!string.IsNullOrWhiteSpace(customerNameField.Source)) + { + Assert.IsTrue(customerNameField.Source.StartsWith("D("), + "Source should start with 'D(' for document fields"); + Console.WriteLine($" Source: {customerNameField.Source}"); + } + + if (customerNameField.Spans != null && customerNameField.Spans.Count > 0) + { + Assert.IsTrue(customerNameField.Spans.Count > 0, "Spans should not be empty when not null"); + foreach (var span in customerNameField.Spans) + { + Assert.IsTrue(span.Offset >= 0, $"Span offset should be >= 0, but was {span.Offset}"); + Assert.IsTrue(span.Length > 0, $"Span length should be > 0, but was {span.Length}"); + } + Console.WriteLine($" Spans: {customerNameField.Spans.Count} span(s)"); + } + } + else + { + Console.WriteLine("āš ļø CustomerName field not found"); + } + + // Verify InvoiceDate field + var invoiceDateField = docContent["InvoiceDate"]; + if (invoiceDateField != null) + { + Console.WriteLine($"InvoiceDate field found"); + + if (invoiceDateField.Value != null) + { + Assert.IsFalse(string.IsNullOrWhiteSpace(invoiceDateField.Value.ToString()), + "InvoiceDate value should not be empty when present"); + Console.WriteLine($" Value: {invoiceDateField.Value}"); + } + + if (invoiceDateField.Confidence.HasValue) + { + Assert.IsTrue(invoiceDateField.Confidence.Value >= 0 && invoiceDateField.Confidence.Value <= 1, + $"InvoiceDate confidence should be between 0 and 1, but was {invoiceDateField.Confidence.Value}"); + Console.WriteLine($" Confidence: {invoiceDateField.Confidence.Value:F2}"); + } + + if (!string.IsNullOrWhiteSpace(invoiceDateField.Source)) + { + Assert.IsTrue(invoiceDateField.Source.StartsWith("D("), + "Source should start with 'D(' for document 
fields"); + Console.WriteLine($" Source: {invoiceDateField.Source}"); + } + + if (invoiceDateField.Spans != null && invoiceDateField.Spans.Count > 0) + { + Assert.IsTrue(invoiceDateField.Spans.Count > 0, "Spans should not be empty when not null"); + foreach (var span in invoiceDateField.Spans) + { + Assert.IsTrue(span.Offset >= 0, $"Span offset should be >= 0, but was {span.Offset}"); + Assert.IsTrue(span.Length > 0, $"Span length should be > 0, but was {span.Length}"); + } + Console.WriteLine($" Spans: {invoiceDateField.Spans.Count} span(s)"); + } + } + else + { + Console.WriteLine("āš ļø InvoiceDate field not found"); + } + + // Verify TotalAmount object field + if (docContent["TotalAmount"] is ObjectField totalAmountObj) + { + Console.WriteLine($"TotalAmount object field found"); + + if (totalAmountObj.Confidence.HasValue) + { + Assert.IsTrue(totalAmountObj.Confidence.Value >= 0 && totalAmountObj.Confidence.Value <= 1, + $"TotalAmount confidence should be between 0 and 1, but was {totalAmountObj.Confidence.Value}"); + Console.WriteLine($" Confidence: {totalAmountObj.Confidence.Value:F2}"); + } + + if (!string.IsNullOrEmpty(totalAmountObj.Source)) + { + Console.WriteLine($" Source: {totalAmountObj.Source}"); + } + + // Verify Amount sub-field + var amountField = totalAmountObj["Amount"]; + if (amountField != null) + { + Console.WriteLine($" Amount field found"); + if (amountField.Value is double amount) + { + Assert.IsTrue(amount >= 0, $"Amount should be >= 0, but was {amount}"); + Console.WriteLine($" Value: {amount:F2}"); + } + } + + // Verify CurrencyCode sub-field + var currencyField = totalAmountObj["CurrencyCode"]; + if (currencyField != null) + { + Console.WriteLine($" CurrencyCode field found"); + if (currencyField.Value != null) + { + var currency = currencyField.Value.ToString(); + if (!string.IsNullOrWhiteSpace(currency)) + { + // äæ®å¤ļ¼šå…ˆę£€ęŸ„ null å†ä½æē”Ø + Assert.AreEqual(3, currency.Length, + $"CurrencyCode should be 3 characters, but was '{currency}'"); + Console.WriteLine($" Value: {currency}"); + } + } + } + } + else + { + Console.WriteLine("āš ļø TotalAmount field not found"); + } + + // Verify LineItems array field + if (docContent["LineItems"] is ArrayField lineItems) + { + Console.WriteLine($"LineItems array field found with {lineItems.Count} item(s)"); + Assert.IsTrue(lineItems.Count >= 0, "LineItems count should be >= 0"); + + for (int i = 0; i < lineItems.Count; i++) + { + if (lineItems[i] is ObjectField item) + { + Console.WriteLine($" Line item {i + 1}:"); + + if (item.Confidence.HasValue) + { + Assert.IsTrue(item.Confidence.Value >= 0 && item.Confidence.Value <= 1, + $"Line item {i + 1} confidence should be between 0 and 1, but was {item.Confidence.Value}"); + Console.WriteLine($" Confidence: {item.Confidence.Value:F2}"); + } + + // Verify Description field + var descriptionField = item["Description"]; + if (descriptionField?.Value != null) + { + Assert.IsFalse(string.IsNullOrWhiteSpace(descriptionField.Value.ToString()), + $"Line item {i + 1} description should not be empty when present"); + Console.WriteLine($" Description: {descriptionField.Value}"); + } + + // Verify Quantity field + var quantityField = item["Quantity"]; + if (quantityField?.Value is double quantity) + { + Assert.IsTrue(quantity >= 0, $"Line item {i + 1} quantity should be >= 0, but was {quantity}"); + Console.WriteLine($" Quantity: {quantity}"); + } + + // Verify UnitPrice field if exists + var unitPriceField = item["UnitPrice"]; + if (unitPriceField?.Value is double unitPrice) + 
{ + Assert.IsTrue(unitPrice >= 0, $"Line item {i + 1} unit price should be >= 0, but was {unitPrice}"); + Console.WriteLine($" UnitPrice: {unitPrice:F2}"); + } + + // Verify Amount field if exists + var itemAmountField = item["Amount"]; + if (itemAmountField?.Value is double itemAmount) + { + Assert.IsTrue(itemAmount >= 0, $"Line item {i + 1} amount should be >= 0, but was {itemAmount}"); + Console.WriteLine($" Amount: {itemAmount:F2}"); + } + } + else + { + Assert.Fail($"Line item {i + 1} should be an ObjectField"); + } + } + } + else + { + Console.WriteLine("āš ļø LineItems field not found"); + } + + Console.WriteLine("All invoice fields validated successfully"); + } + else + { + Assert.Fail("Content should be DocumentContent for invoice analysis"); + } + #endregion + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample04_CreateAnalyzer.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample04_CreateAnalyzer.cs new file mode 100644 index 000000000000..498e5847eb32 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample04_CreateAnalyzer.cs @@ -0,0 +1,606 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable enable + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task CreateAnalyzerAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingCreateAnalyzer +#if SNIPPET + // Generate a unique analyzer ID + string analyzerId = $"my_custom_analyzer_{DateTimeOffset.UtcNow.ToUnixTimeSeconds()}"; +#else + // Generate a unique analyzer ID and record it for playback + string defaultId = $"test_custom_analyzer_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("analyzerId", defaultId) ?? 
defaultId; +#endif + + // Define field schema with custom fields + // This example demonstrates three extraction methods: + // - extract: Literal text extraction (requires estimateSourceAndConfidence) + // - generate: AI-generated values based on content interpretation + // - classify: Classification against predefined categories + var fieldSchema = new ContentFieldSchema( + new Dictionary + { + ["company_name"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Extract, + Description = "Name of the company" + }, + ["total_amount"] = new ContentFieldDefinition + { + Type = ContentFieldType.Number, + Method = GenerationMethod.Extract, + Description = "Total amount on the document" + }, + ["document_summary"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Generate, + Description = "A brief summary of the document content" + }, + ["document_type"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Classify, + Description = "Type of document" + } + }) + { + Name = "company_schema", + Description = "Schema for extracting company information" + }; + + // Add enum values for the classify field + fieldSchema.Fields["document_type"].Enum.Add("invoice"); + fieldSchema.Fields["document_type"].Enum.Add("receipt"); + fieldSchema.Fields["document_type"].Enum.Add("contract"); + fieldSchema.Fields["document_type"].Enum.Add("report"); + fieldSchema.Fields["document_type"].Enum.Add("other"); + + // Create analyzer configuration + var config = new ContentAnalyzerConfig + { + EnableFormula = true, + EnableLayout = true, + EnableOcr = true, + EstimateFieldSourceAndConfidence = true, + ReturnDetails = true + }; + + // Create the custom analyzer + var customAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Custom analyzer for extracting company information", + Config = config, + FieldSchema = fieldSchema + }; + + // Add model mappings (required for custom analyzers) + customAnalyzer.Models.Add("completion", "gpt-4.1"); + customAnalyzer.Models.Add("embedding", "text-embedding-3-large"); + + // Create the analyzer + var operation = await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + customAnalyzer, + allowReplace: true); + + ContentAnalyzer result = operation.Value; + Console.WriteLine($"Analyzer '{analyzerId}' created successfully!"); + #endregion + + #region Assertion:ContentUnderstandingCreateAnalyzer + Assert.IsNotNull(analyzerId, "Analyzer ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(analyzerId), "Analyzer ID should not be empty"); + Assert.IsNotNull(fieldSchema, "Field schema should not be null"); + Assert.IsNotNull(customAnalyzer, "Custom analyzer should not be null"); + Assert.IsNotNull(operation, "Create analyzer operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Create analyzer operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + Console.WriteLine("Create analyzer operation properties verified"); + + Assert.IsNotNull(result, "Analyzer result should not be null"); + Console.WriteLine($"Analyzer '{analyzerId}' created successfully"); + + // 
Verify base analyzer + Assert.IsNotNull(result.BaseAnalyzerId, "Base analyzer ID should not be null"); + Assert.AreEqual("prebuilt-document", result.BaseAnalyzerId, "Base analyzer ID should match"); + Console.WriteLine($"Base analyzer ID verified: {result.BaseAnalyzerId}"); + + // Verify analyzer config + Assert.IsNotNull(result.Config, "Analyzer config should not be null"); + Assert.IsTrue(result.Config.EnableFormula, "EnableFormula should be true"); + Assert.IsTrue(result.Config.EnableLayout, "EnableLayout should be true"); + Assert.IsTrue(result.Config.EnableOcr, "EnableOcr should be true"); + Assert.IsTrue(result.Config.EstimateFieldSourceAndConfidence, "EstimateFieldSourceAndConfidence should be true"); + Assert.IsTrue(result.Config.ReturnDetails, "ReturnDetails should be true"); + Console.WriteLine("Analyzer config verified"); + + // Verify field schema + Assert.IsNotNull(result.FieldSchema, "Field schema should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(result.FieldSchema.Name), "Field schema name should not be empty"); + Assert.AreEqual("company_schema", result.FieldSchema.Name, "Field schema name should match"); + Assert.IsFalse(string.IsNullOrWhiteSpace(result.FieldSchema.Description), "Field schema description should not be empty"); + Console.WriteLine($"Field schema verified: {result.FieldSchema.Name}"); + + // Verify field schema fields + Assert.IsNotNull(result.FieldSchema.Fields, "Field schema fields should not be null"); + Assert.AreEqual(4, result.FieldSchema.Fields.Count, "Should have 4 custom fields"); + Console.WriteLine($"Field schema contains {result.FieldSchema.Fields.Count} fields"); + + // Verify company_name field + Assert.IsTrue(result.FieldSchema.Fields.ContainsKey("company_name"), "Should contain company_name field"); + var companyNameDef = result.FieldSchema.Fields["company_name"]; + Assert.AreEqual(ContentFieldType.String, companyNameDef.Type, "company_name should be String type"); + Assert.AreEqual(GenerationMethod.Extract, companyNameDef.Method, "company_name should use Extract method"); + Assert.IsFalse(string.IsNullOrWhiteSpace(companyNameDef.Description), "company_name should have description"); + Console.WriteLine(" company_name field verified (String, Extract)"); + + // Verify total_amount field + Assert.IsTrue(result.FieldSchema.Fields.ContainsKey("total_amount"), "Should contain total_amount field"); + var totalAmountDef = result.FieldSchema.Fields["total_amount"]; + Assert.AreEqual(ContentFieldType.Number, totalAmountDef.Type, "total_amount should be Number type"); + Assert.AreEqual(GenerationMethod.Extract, totalAmountDef.Method, "total_amount should use Extract method"); + Assert.IsFalse(string.IsNullOrWhiteSpace(totalAmountDef.Description), "total_amount should have description"); + Console.WriteLine(" total_amount field verified (Number, Extract)"); + + // Verify document_summary field + Assert.IsTrue(result.FieldSchema.Fields.ContainsKey("document_summary"), "Should contain document_summary field"); + var summaryDef = result.FieldSchema.Fields["document_summary"]; + Assert.AreEqual(ContentFieldType.String, summaryDef.Type, "document_summary should be String type"); + Assert.AreEqual(GenerationMethod.Generate, summaryDef.Method, "document_summary should use Generate method"); + Assert.IsFalse(string.IsNullOrWhiteSpace(summaryDef.Description), "document_summary should have description"); + Console.WriteLine(" document_summary field verified (String, Generate)"); + + // Verify document_type field + 
Assert.IsTrue(result.FieldSchema.Fields.ContainsKey("document_type"), "Should contain document_type field"); + var documentTypeDef = result.FieldSchema.Fields["document_type"]; + Assert.AreEqual(ContentFieldType.String, documentTypeDef.Type, "document_type should be String type"); + Assert.AreEqual(GenerationMethod.Classify, documentTypeDef.Method, "document_type should use Classify method"); + Assert.IsFalse(string.IsNullOrWhiteSpace(documentTypeDef.Description), "document_type should have description"); + Assert.IsNotNull(documentTypeDef.Enum, "document_type should have enum values"); + Assert.AreEqual(5, documentTypeDef.Enum.Count, "document_type should have 5 enum values"); + Assert.IsTrue(documentTypeDef.Enum.Contains("invoice"), "document_type enum should contain 'invoice'"); + Assert.IsTrue(documentTypeDef.Enum.Contains("receipt"), "document_type enum should contain 'receipt'"); + Assert.IsTrue(documentTypeDef.Enum.Contains("contract"), "document_type enum should contain 'contract'"); + Assert.IsTrue(documentTypeDef.Enum.Contains("report"), "document_type enum should contain 'report'"); + Assert.IsTrue(documentTypeDef.Enum.Contains("other"), "document_type enum should contain 'other'"); + Console.WriteLine(" document_type field verified (String, Classify, 5 enum values)"); + + // Verify models + Assert.IsNotNull(result.Models, "Models should not be null"); + Assert.IsTrue(result.Models.Count >= 2, "Should have at least 2 model mappings"); + Assert.IsTrue(result.Models.ContainsKey("completion"), "Should contain 'completion' model mapping"); + Assert.IsTrue(result.Models.ContainsKey("embedding"), "Should contain 'embedding' model mapping"); + Assert.AreEqual("gpt-4.1", result.Models["completion"], "Completion model should be 'gpt-4.1'"); + Assert.AreEqual("text-embedding-3-large", result.Models["embedding"], "Embedding model should be 'text-embedding-3-large'"); + Console.WriteLine($"Model mappings verified: {result.Models.Count} model(s)"); + + // Verify description + if (!string.IsNullOrWhiteSpace(result.Description)) + { + Console.WriteLine($"Analyzer description: {result.Description}"); + } + + Console.WriteLine("All analyzer creation properties validated successfully"); + #endregion + + #region Snippet:ContentUnderstandingDeleteCreatedAnalyzer + // Clean up: delete the analyzer (for testing purposes only) + // In production, analyzers are typically kept and reused +#if SNIPPET + await client.DeleteAnalyzerAsync(analyzerId); + Console.WriteLine($"Analyzer '{analyzerId}' deleted successfully."); +#else + try + { + await client.DeleteAnalyzerAsync(analyzerId); + Console.WriteLine($"Analyzer '{analyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors in tests + } +#endif + #endregion + } + + [RecordedTest] + public async Task UseCustomAnalyzerAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + // First create an analyzer + // Generate a unique analyzer ID and record it for playback + string defaultId = $"test_analyzer_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("useCustomAnalyzerId", defaultId) ?? 
defaultId; + var fieldSchema = new ContentFieldSchema( + new Dictionary + { + ["company_name"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Extract, + Description = "Name of the company" + }, + ["total_amount"] = new ContentFieldDefinition + { + Type = ContentFieldType.Number, + Method = GenerationMethod.Extract, + Description = "Total amount on the document" + }, + ["document_summary"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Generate, + Description = "A brief summary of the document content" + }, + ["document_type"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Classify, + Description = "Type of document" + } + }) + { + Name = "company_schema", + Description = "Schema for extracting company information" + }; + + // Add enum values for the classify field + fieldSchema.Fields["document_type"].Enum.Add("invoice"); + fieldSchema.Fields["document_type"].Enum.Add("receipt"); + fieldSchema.Fields["document_type"].Enum.Add("contract"); + fieldSchema.Fields["document_type"].Enum.Add("report"); + fieldSchema.Fields["document_type"].Enum.Add("other"); + + var config = new ContentAnalyzerConfig + { + EnableFormula = true, + EnableLayout = true, + EnableOcr = true + }; + + var customAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Custom analyzer for extracting company information", + Config = config, + FieldSchema = fieldSchema + }; + + customAnalyzer.Models.Add("completion", "gpt-4.1"); + customAnalyzer.Models.Add("embedding", "text-embedding-3-large"); + + await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + customAnalyzer, + allowReplace: true); + + try + { + #region Snippet:ContentUnderstandingUseCustomAnalyzer +#if SNIPPET + var documentUrl = new Uri(""); + // Analyze a document using the custom analyzer + var analyzeOperation = await client.AnalyzeAsync( + WaitUntil.Completed, + analyzerId, + inputs: new[] { new AnalyzeInput { Url = documentUrl } }); +#else + // Analyze a document using the custom analyzer + var documentUrl = ContentUnderstandingClientTestEnvironment.CreateUri("invoice.pdf"); + var analyzeOperation = await client.AnalyzeAsync( + WaitUntil.Completed, + analyzerId, + inputs: new[] { new AnalyzeInput { Url = documentUrl } }); +#endif + + var analyzeResult = analyzeOperation.Value; + + // Extract custom fields from the result + // Since EstimateFieldSourceAndConfidence is enabled, we can access confidence scores and source information + if (analyzeResult.Contents?.FirstOrDefault() is DocumentContent content) + { + // Extract field (literal text extraction) + if (content.Fields.TryGetValue("company_name", out var companyNameField)) + { + var companyName = companyNameField is StringField sf ? sf.ValueString : null; + Console.WriteLine($"Company Name (extract): {companyName ?? "(not found)"}"); + if (companyNameField != null) + { + Console.WriteLine($" Confidence: {companyNameField.Confidence?.ToString("F2") ?? "N/A"}"); + Console.WriteLine($" Source: {companyNameField.Source ?? 
"N/A"}"); + if (companyNameField.Spans != null && companyNameField.Spans.Count > 0) + { + var span = companyNameField.Spans[0]; + Console.WriteLine($" Position in markdown: offset={span.Offset}, length={span.Length}"); + } + } + } + + // Extract field (literal text extraction) + if (content.Fields.TryGetValue("total_amount", out var totalAmountField)) + { + var totalAmount = totalAmountField is NumberField nf ? nf.ValueNumber : null; + Console.WriteLine($"Total Amount (extract): {totalAmount?.ToString("F2") ?? "(not found)"}"); + if (totalAmountField != null) + { + Console.WriteLine($" Confidence: {totalAmountField.Confidence?.ToString("F2") ?? "N/A"}"); + Console.WriteLine($" Source: {totalAmountField.Source ?? "N/A"}"); + if (totalAmountField.Spans != null && totalAmountField.Spans.Count > 0) + { + var span = totalAmountField.Spans[0]; + Console.WriteLine($" Position in markdown: offset={span.Offset}, length={span.Length}"); + } + } + } + + // Generate field (AI-generated value) + if (content.Fields.TryGetValue("document_summary", out var summaryField)) + { + var summary = summaryField is StringField sf ? sf.ValueString : null; + Console.WriteLine($"Document Summary (generate): {summary ?? "(not found)"}"); + if (summaryField != null) + { + Console.WriteLine($" Confidence: {summaryField.Confidence?.ToString("F2") ?? "N/A"}"); + // Note: Generated fields may not have source information + if (!string.IsNullOrEmpty(summaryField.Source)) + { + Console.WriteLine($" Source: {summaryField.Source}"); + } + } + } + + // Classify field (classification against predefined categories) + if (content.Fields.TryGetValue("document_type", out var documentTypeField)) + { + var documentType = documentTypeField is StringField sf ? sf.ValueString : null; + Console.WriteLine($"Document Type (classify): {documentType ?? "(not found)"}"); + if (documentTypeField != null) + { + Console.WriteLine($" Confidence: {documentTypeField.Confidence?.ToString("F2") ?? 
"N/A"}"); + // Note: Classified fields may not have source information + if (!string.IsNullOrEmpty(documentTypeField.Source)) + { + Console.WriteLine($" Source: {documentTypeField.Source}"); + } + } + } + } + #endregion + + #region Assertion:ContentUnderstandingUseCustomAnalyzer + Assert.IsNotNull(documentUrl, "Document URL should not be null"); + Assert.IsTrue(documentUrl.IsAbsoluteUri, "Document URL should be absolute"); + Assert.IsNotNull(analyzeOperation, "Analyze operation should not be null"); + Assert.IsTrue(analyzeOperation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(analyzeOperation.HasValue, "Operation should have a value"); + Assert.IsNotNull(analyzeOperation.GetRawResponse(), "Analyze operation should have a raw response"); + Assert.IsTrue(analyzeOperation.GetRawResponse().Status >= 200 && analyzeOperation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {analyzeOperation.GetRawResponse().Status}"); + Console.WriteLine("Analyze operation properties verified"); + + Assert.IsNotNull(analyzeResult, "Analyze result should not be null"); + Assert.IsNotNull(analyzeResult.Contents, "Result should contain contents"); + Assert.IsTrue(analyzeResult.Contents!.Count > 0, "Result should have at least one content"); + Assert.AreEqual(1, analyzeResult.Contents.Count, "Result should have exactly one content element"); + Console.WriteLine($"Analysis result contains {analyzeResult.Contents.Count} content(s)"); + + var documentContent = analyzeResult.Contents?.FirstOrDefault() as DocumentContent; + Assert.IsNotNull(documentContent, "Content should be DocumentContent"); + Assert.IsNotNull(documentContent!.Fields, "Document content should have fields"); + Console.WriteLine($"Document content has {documentContent.Fields.Count} field(s)"); + + // Verify company_name field (Extract method) + if (documentContent.Fields.TryGetValue("company_name", out var companyNameFieldAssert)) + { + Console.WriteLine("company_name field found"); + Assert.IsTrue(companyNameFieldAssert is StringField, "company_name should be a StringField"); + + if (companyNameFieldAssert is StringField cnf && !string.IsNullOrWhiteSpace(cnf.ValueString)) + { + Console.WriteLine($" Value: {cnf.ValueString}"); + } + + if (companyNameFieldAssert.Confidence.HasValue) + { + Assert.IsTrue(companyNameFieldAssert.Confidence.Value >= 0 && companyNameFieldAssert.Confidence.Value <= 1, + $"company_name confidence should be between 0 and 1, but was {companyNameFieldAssert.Confidence.Value}"); + Console.WriteLine($" Confidence: {companyNameFieldAssert.Confidence.Value:F2}"); + } + + if (!string.IsNullOrWhiteSpace(companyNameFieldAssert.Source)) + { + Assert.IsTrue(companyNameFieldAssert.Source.StartsWith("D("), + "Source should start with 'D(' for extracted fields"); + Console.WriteLine($" Source: {companyNameFieldAssert.Source}"); + } + + if (companyNameFieldAssert.Spans != null && companyNameFieldAssert.Spans.Count > 0) + { + Assert.IsTrue(companyNameFieldAssert.Spans.Count > 0, "Spans should not be empty when not null"); + foreach (var span in companyNameFieldAssert.Spans) + { + Assert.IsTrue(span.Offset >= 0, $"Span offset should be >= 0, but was {span.Offset}"); + Assert.IsTrue(span.Length > 0, $"Span length should be > 0, but was {span.Length}"); + } + Console.WriteLine($" Spans: {companyNameFieldAssert.Spans.Count} span(s)"); + } + } + else + { + Console.WriteLine("āš ļø company_name field not found"); + } + + // Verify total_amount field (Extract method) + if 
(documentContent.Fields.TryGetValue("total_amount", out var totalAmountFieldAssert)) + { + Console.WriteLine("total_amount field found"); + Assert.IsTrue(totalAmountFieldAssert is NumberField, "total_amount should be a NumberField"); + + if (totalAmountFieldAssert is NumberField nfAssert && nfAssert.ValueNumber.HasValue) + { + Assert.IsTrue(nfAssert.ValueNumber.Value >= 0, $"total_amount should be >= 0, but was {nfAssert.ValueNumber.Value}"); + Console.WriteLine($" Value: {nfAssert.ValueNumber.Value:F2}"); + } + + if (totalAmountFieldAssert.Confidence.HasValue) + { + Assert.IsTrue(totalAmountFieldAssert.Confidence.Value >= 0 && totalAmountFieldAssert.Confidence.Value <= 1, + $"total_amount confidence should be between 0 and 1, but was {totalAmountFieldAssert.Confidence.Value}"); + Console.WriteLine($" Confidence: {totalAmountFieldAssert.Confidence.Value:F2}"); + } + + if (!string.IsNullOrEmpty(totalAmountFieldAssert.Source)) + { + Assert.IsTrue(totalAmountFieldAssert.Source.StartsWith("D("), + "Source should start with 'D(' for extracted fields"); + Console.WriteLine($" Source: {totalAmountFieldAssert.Source}"); + } + + if (totalAmountFieldAssert.Spans != null && totalAmountFieldAssert.Spans.Count > 0) + { + Assert.IsTrue(totalAmountFieldAssert.Spans.Count > 0, "Spans should not be empty when not null"); + foreach (var span in totalAmountFieldAssert.Spans) + { + Assert.IsTrue(span.Offset >= 0, $"Span offset should be >= 0, but was {span.Offset}"); + Assert.IsTrue(span.Length > 0, $"Span length should be > 0, but was {span.Length}"); + } + Console.WriteLine($" Spans: {totalAmountFieldAssert.Spans.Count} span(s)"); + } + } + else + { + Console.WriteLine("āš ļø total_amount field not found"); + } + + // Verify document_summary field (Generate method) + if (documentContent.Fields.TryGetValue("document_summary", out var summaryFieldAssert)) + { + Console.WriteLine("document_summary field found"); + Assert.IsTrue(summaryFieldAssert is StringField, "document_summary should be a StringField"); + + if (summaryFieldAssert is StringField dsf && !string.IsNullOrWhiteSpace(dsf.ValueString)) + { + Assert.IsTrue(dsf.ValueString.Length > 0, "document_summary should not be empty when present"); + Console.WriteLine($" Value: {dsf.ValueString.Substring(0, Math.Min(100, dsf.ValueString.Length))}..."); + } + + if (summaryFieldAssert.Confidence.HasValue) + { + Assert.IsTrue(summaryFieldAssert.Confidence.Value >= 0 && summaryFieldAssert.Confidence.Value <= 1, + $"document_summary confidence should be between 0 and 1, but was {summaryFieldAssert.Confidence.Value}"); + Console.WriteLine($" Confidence: {summaryFieldAssert.Confidence.Value:F2}"); + } + + // Note: Generated fields may not have source or spans + if (!string.IsNullOrEmpty(summaryFieldAssert.Source)) + { + Console.WriteLine($" Source: {summaryFieldAssert.Source}"); + } + } + else + { + Console.WriteLine("āš ļø document_summary field not found"); + } + + // Verify document_type field (Classify method) + if (documentContent.Fields.TryGetValue("document_type", out var documentTypeFieldAssert)) + { + Console.WriteLine("document_type field found"); + Assert.IsTrue(documentTypeFieldAssert is StringField, "document_type should be a StringField"); + + if (documentTypeFieldAssert.Confidence.HasValue) + { + Assert.IsTrue(documentTypeFieldAssert.Confidence.Value >= 0 && documentTypeFieldAssert.Confidence.Value <= 1, + $"document_type confidence should be between 0 and 1, but was {documentTypeFieldAssert.Confidence.Value}"); + Console.WriteLine($" Confidence: 
{documentTypeFieldAssert.Confidence.Value:F2}"); + } + + // Verify the classified value is one of the predefined enum values if present + if (documentTypeFieldAssert is StringField sfAssert && !string.IsNullOrWhiteSpace(sfAssert.ValueString)) + { + var validTypes = new[] { "invoice", "receipt", "contract", "report", "other" }; + Assert.IsTrue(validTypes.Contains(sfAssert.ValueString), + $"document_type should be one of the predefined values, but got: {sfAssert.ValueString}"); + Console.WriteLine($" Value: {sfAssert.ValueString}"); + } + + // Note: Classified fields may not have source or spans + if (!string.IsNullOrEmpty(documentTypeFieldAssert.Source)) + { + Console.WriteLine($" Source: {documentTypeFieldAssert.Source}"); + } + } + else + { + Console.WriteLine("āš ļø document_type field not found"); + } + + Console.WriteLine("All custom analyzer usage properties validated successfully"); + #endregion + + // Clean up: delete the analyzer (for testing purposes only) + // In production, analyzers are typically kept and reused +#if SNIPPET + await client.DeleteAnalyzerAsync(analyzerId); + Console.WriteLine($"Analyzer '{analyzerId}' deleted successfully."); +#else + try + { + await client.DeleteAnalyzerAsync(analyzerId); + Console.WriteLine($"Analyzer '{analyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors in tests + } +#endif + } + finally + { + // Ensure cleanup even if snippet code fails + try + { + await client.DeleteAnalyzerAsync(analyzerId); + } + catch + { + // Ignore cleanup errors in tests + } + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample05_CreateClassifier.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample05_CreateClassifier.cs new file mode 100644 index 000000000000..e7b869a3bd95 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample05_CreateClassifier.cs @@ -0,0 +1,558 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable enable + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task CreateClassifierAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingCreateClassifier +#if SNIPPET + // Define content categories for classification + var categories = new Dictionary + { + ["Loan_Application"] = new ContentCategory + { + Description = "Documents submitted by individuals or businesses to request funding, typically including personal or business details, financial history, loan amount, purpose, and supporting documentation." + }, + ["Invoice"] = new ContentCategory + { + Description = "Billing documents issued by sellers or service providers to request payment for goods or services, detailing items, prices, taxes, totals, and payment terms." 
+ }, + ["Bank_Statement"] = new ContentCategory + { + Description = "Official statements issued by banks that summarize account activity over a period, including deposits, withdrawals, fees, and balances." + } + }; + + // Create analyzer configuration + var config = new ContentAnalyzerConfig + { + ReturnDetails = true, + EnableSegment = true // Enable automatic segmentation by category + }; + + // Add categories to config + foreach (var kvp in categories) + { + config.ContentCategories.Add(kvp.Key, kvp.Value); + } + + // Create the classifier analyzer + var classifier = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Custom classifier for financial document categorization", + Config = config + }; + classifier.Models.Add("completion", "gpt-4.1"); + + // Create the classifier + string analyzerId = $"my_classifier_{DateTimeOffset.UtcNow.ToUnixTimeSeconds()}"; +#else + // Define content categories for classification + var categories = new Dictionary + { + ["Loan_Application"] = new ContentCategory + { + Description = "Documents submitted by individuals or businesses to request funding, typically including personal or business details, financial history, loan amount, purpose, and supporting documentation." + }, + ["Invoice"] = new ContentCategory + { + Description = "Billing documents issued by sellers or service providers to request payment for goods or services, detailing items, prices, taxes, totals, and payment terms." + }, + ["Bank_Statement"] = new ContentCategory + { + Description = "Official statements issued by banks that summarize account activity over a period, including deposits, withdrawals, fees, and balances." + } + }; + + // Create analyzer configuration + var config = new ContentAnalyzerConfig + { + ReturnDetails = true, + EnableSegment = true // Enable automatic segmentation by category + }; + + // Add categories to config + foreach (var kvp in categories) + { + config.ContentCategories.Add(kvp.Key, kvp.Value); + } + + // Create the classifier analyzer + var classifier = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Custom classifier for financial document categorization", + Config = config + }; + classifier.Models.Add("completion", "gpt-4.1"); + + // Generate a unique analyzer ID and record it for playback + string defaultId = $"test_classifier_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("analyzerId", defaultId) ?? 
defaultId; +#endif + var operation = await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + classifier); + + ContentAnalyzer result = operation.Value; + Console.WriteLine($"Classifier '{analyzerId}' created successfully!"); + #endregion + + #region Assertion:ContentUnderstandingCreateClassifier + Assert.IsNotNull(analyzerId, "Analyzer ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(analyzerId), "Analyzer ID should not be empty"); + Assert.IsNotNull(categories, "Categories dictionary should not be null"); + Assert.AreEqual(3, categories.Count, "Should have 3 categories defined"); + Assert.IsNotNull(config, "Classifier config should not be null"); + Assert.IsNotNull(classifier, "Classifier should not be null"); + Assert.IsNotNull(operation, "Create classifier operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Create classifier operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + Console.WriteLine("Create classifier operation properties verified"); + + Assert.IsNotNull(result, "Classifier result should not be null"); + Console.WriteLine($"Classifier '{analyzerId}' created successfully"); + + // Verify base analyzer + Assert.IsNotNull(result.BaseAnalyzerId, "Base analyzer ID should not be null"); + Assert.AreEqual("prebuilt-document", result.BaseAnalyzerId, "Base analyzer ID should match"); + Console.WriteLine($"Base analyzer ID verified: {result.BaseAnalyzerId}"); + + // Verify classifier config + Assert.IsNotNull(result.Config, "Classifier config should not be null"); + Assert.IsTrue(result.Config.ReturnDetails, "ReturnDetails should be true"); + Assert.IsTrue(result.Config.EnableSegment == true, "EnableSegment should be true"); + Console.WriteLine("Classifier config verified (ReturnDetails=true, EnableSegment=true)"); + + // Verify content categories + Assert.IsNotNull(result.Config.ContentCategories, "Content categories should not be null"); + Assert.AreEqual(3, result.Config.ContentCategories.Count, "Should have 3 content categories"); + Console.WriteLine($"Content categories count verified: {result.Config.ContentCategories.Count}"); + + // Verify Loan_Application category + Assert.IsTrue(result.Config.ContentCategories.ContainsKey("Loan_Application"), + "Should contain Loan_Application category"); + var loanCategory = result.Config.ContentCategories["Loan_Application"]; + Assert.IsNotNull(loanCategory, "Loan_Application category should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(loanCategory.Description), + "Loan_Application description should not be empty"); + Assert.IsTrue(loanCategory.Description.Contains("funding") || loanCategory.Description.Contains("loan"), + "Loan_Application description should be relevant"); + Console.WriteLine(" Loan_Application category verified"); + + // Verify Invoice category + Assert.IsTrue(result.Config.ContentCategories.ContainsKey("Invoice"), + "Should contain Invoice category"); + var invoiceCategory = result.Config.ContentCategories["Invoice"]; + Assert.IsNotNull(invoiceCategory, "Invoice category should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(invoiceCategory.Description), + "Invoice description should not be empty"); + 
Assert.IsTrue(invoiceCategory.Description.Contains("billing") || invoiceCategory.Description.Contains("payment"), + "Invoice description should be relevant"); + Console.WriteLine(" Invoice category verified"); + + // Verify Bank_Statement category + Assert.IsTrue(result.Config.ContentCategories.ContainsKey("Bank_Statement"), + "Should contain Bank_Statement category"); + var bankCategory = result.Config.ContentCategories["Bank_Statement"]; + Assert.IsNotNull(bankCategory, "Bank_Statement category should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(bankCategory.Description), + "Bank_Statement description should not be empty"); + Assert.IsTrue(bankCategory.Description.Contains("bank") || bankCategory.Description.Contains("account"), + "Bank_Statement description should be relevant"); + Console.WriteLine(" Bank_Statement category verified"); + + // Verify models + Assert.IsNotNull(result.Models, "Models should not be null"); + Assert.IsTrue(result.Models.Count >= 1, "Should have at least 1 model mapping"); + Assert.IsTrue(result.Models.ContainsKey("completion"), "Should contain 'completion' model mapping"); + Assert.AreEqual("gpt-4.1", result.Models["completion"], "Completion model should be 'gpt-4.1'"); + Console.WriteLine($"Model mappings verified: {result.Models.Count} model(s)"); + + // Verify description + if (!string.IsNullOrWhiteSpace(result.Description)) + { + Assert.IsTrue(result.Description.Contains("classifier") || result.Description.Contains("categorization"), + "Description should be relevant to classification"); + Console.WriteLine($"Classifier description: {result.Description}"); + } + + Console.WriteLine("All classifier creation properties validated successfully"); + #endregion + + #region Snippet:ContentUnderstandingDeleteClassifier + // Clean up: delete the classifier (for testing purposes only) + // In production, classifiers are typically kept and reused +#if SNIPPET + await client.DeleteAnalyzerAsync(analyzerId); + Console.WriteLine($"Classifier '{analyzerId}' deleted successfully."); +#else + try + { + await client.DeleteAnalyzerAsync(analyzerId); + Console.WriteLine($"Classifier '{analyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors in tests + } +#endif + #endregion + } + + [RecordedTest] + public async Task AnalyzeCategoryAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + // First create a classifier without segmentation + string defaultId = $"test_classifier_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("analyzerId_no_segment", defaultId) ?? defaultId; + var config = new ContentAnalyzerConfig + { + ReturnDetails = true, + EnableSegment = false // No automatic segmentation + }; + config.ContentCategories.Add("Invoice", new ContentCategory + { + Description = "Billing documents issued by sellers or service providers to request payment for goods or services." 
+ }); + + var classifier = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Custom classifier for financial document categorization without segmentation", + Config = config + }; + classifier.Models.Add("completion", "gpt-4.1"); + + await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + classifier); + + try + { + #region Snippet:ContentUnderstandingAnalyzeCategory +#if SNIPPET + // Analyze a document (EnableSegment=false means entire document is one category) + string filePath = ""; + byte[] fileBytes = File.ReadAllBytes(filePath); + AnalyzeResultOperation analyzeOperation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + analyzerId, + "application/pdf", + BinaryData.FromBytes(fileBytes)); +#else + // Analyze a document (EnableSegment=false means entire document is one category) + var filePath = ContentUnderstandingClientTestEnvironment.CreatePath("mixed_financial_docs.pdf"); + var fileBytes = File.ReadAllBytes(filePath); + AnalyzeResultOperation analyzeOperation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + analyzerId, + "application/pdf", + BinaryData.FromBytes(fileBytes)); +#endif + + var analyzeResult = analyzeOperation.Value; + + // Display classification results + if (analyzeResult.Contents?.FirstOrDefault() is DocumentContent docContent) + { + Console.WriteLine($"Pages: {docContent.StartPageNumber}-{docContent.EndPageNumber}"); + + // With EnableSegment=false, the document is classified as a single unit + if (docContent.Segments != null && docContent.Segments.Count > 0) + { + foreach (var segment in docContent.Segments) + { + Console.WriteLine($"Category: {segment.Category ?? "(unknown)"}"); + Console.WriteLine($"Pages: {segment.StartPageNumber}-{segment.EndPageNumber}"); + } + } + } + #endregion + + #region Assertion:ContentUnderstandingAnalyzeCategory + Assert.IsTrue(File.Exists(filePath), $"Sample file not found at {filePath}"); + Assert.IsTrue(fileBytes.Length > 0, "File should not be empty"); + Assert.IsNotNull(analyzeOperation, "Analyze operation with segmentation should not be null"); + Assert.IsTrue(analyzeOperation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(analyzeOperation.HasValue, "Operation should have a value"); + Assert.IsNotNull(analyzeOperation.GetRawResponse(), "Analyze operation with segmentation should have a raw response"); + Assert.IsTrue(analyzeOperation.GetRawResponse().Status >= 200 && analyzeOperation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {analyzeOperation.GetRawResponse().Status}"); + Console.WriteLine("Analyze operation with segmentation properties verified"); + + Assert.IsNotNull(analyzeResult, "Analyze result should not be null"); + Assert.IsNotNull(analyzeResult.Contents, "Result should contain contents"); + Assert.IsTrue(analyzeResult.Contents!.Count > 0, "Result should have at least one content"); + Assert.AreEqual(1, analyzeResult.Contents.Count, "Result should have exactly one content element"); + Console.WriteLine($"Analysis result contains {analyzeResult.Contents.Count} content(s)"); + + var documentContent = analyzeResult.Contents?.FirstOrDefault() as DocumentContent; + Assert.IsNotNull(documentContent, "Content should be DocumentContent"); + Assert.IsTrue(documentContent!.StartPageNumber >= 1, "Start page should be >= 1"); + Assert.IsTrue(documentContent.EndPageNumber >= documentContent.StartPageNumber, + "End page should be >= start page"); + int totalPages = documentContent.EndPageNumber - 
documentContent.StartPageNumber + 1; + Assert.IsTrue(totalPages > 0, "Total pages should be positive"); + Console.WriteLine($"Document has {totalPages} page(s) from {documentContent.StartPageNumber} to {documentContent.EndPageNumber}"); + + // With EnableSegment=true, we expect automatic segmentation + if (documentContent.Segments != null && documentContent.Segments.Count > 0) + { + Assert.IsTrue(documentContent.Segments.Count >= 1, + "Should have at least one segment with EnableSegment=true"); + Console.WriteLine($"Document has {documentContent.Segments.Count} segment(s) (EnableSegment=true, automatic segmentation)"); + + // Verify segments cover the entire document without gaps or overlaps + var sortedSegments = documentContent.Segments.OrderBy(s => s.StartPageNumber).ToList(); + int segmentIndex = 1; + int? lastEndPage = null; + + foreach (var segment in sortedSegments) + { + Assert.IsNotNull(segment, $"Segment {segmentIndex} should not be null"); + Assert.IsTrue(segment.StartPageNumber >= 1, + $"Segment {segmentIndex} start page should be >= 1, but was {segment.StartPageNumber}"); + Assert.IsTrue(segment.EndPageNumber >= segment.StartPageNumber, + $"Segment {segmentIndex} end page should be >= start page"); + Assert.IsTrue(segment.StartPageNumber >= documentContent.StartPageNumber && + segment.EndPageNumber <= documentContent.EndPageNumber, + $"Segment {segmentIndex} page range [{segment.StartPageNumber}, {segment.EndPageNumber}] should be within document page range [{documentContent.StartPageNumber}, {documentContent.EndPageNumber}]"); + + // Check for gaps or overlaps (optional, depending on service behavior) + if (lastEndPage.HasValue) + { + // Segments should be contiguous (no gaps) or may overlap depending on service design + // This assertion can be adjusted based on actual service behavior + if (segment.StartPageNumber > lastEndPage.Value + 1) + { + Console.WriteLine($" āš ļø Gap detected between segment {segmentIndex - 1} and {segmentIndex}"); + } + else if (segment.StartPageNumber <= lastEndPage.Value) + { + Console.WriteLine($" āš ļø Overlap detected between segment {segmentIndex - 1} and {segmentIndex}"); + } + } + lastEndPage = segment.EndPageNumber; + + int segmentPages = segment.EndPageNumber - segment.StartPageNumber + 1; + Console.WriteLine($" Segment {segmentIndex}: Pages {segment.StartPageNumber}-{segment.EndPageNumber} ({segmentPages} page(s))"); + + if (!string.IsNullOrEmpty(segment.Category)) + { + // Verify category is one of the defined categories + var validCategories = new[] { "Invoice", "Loan_Application", "Bank_Statement" }; + if (validCategories.Any(c => string.Equals(c, segment.Category, StringComparison.Ordinal))) + { + TestContext.WriteLine($" Category: {segment.Category}"); + } + else + { + TestContext.WriteLine($" Category: {segment.Category} (not in predefined list)"); + } + } + else + { + Console.WriteLine($" Category: (not specified)"); + } + + if (!string.IsNullOrEmpty(segment.SegmentId)) + { + Assert.IsFalse(string.IsNullOrWhiteSpace(segment.SegmentId), + $"Segment {segmentIndex} ID should not be whitespace"); + Console.WriteLine($" Segment ID: {segment.SegmentId}"); + } + else + { + Console.WriteLine($" Segment ID: (not available)"); + } + + segmentIndex++; + } + + // Verify total coverage (all segments together should cover the document) + var minSegmentPage = sortedSegments.Min(s => s.StartPageNumber); + var maxSegmentPage = sortedSegments.Max(s => s.EndPageNumber); + Assert.IsTrue(minSegmentPage <= documentContent.StartPageNumber, + "Segments 
should start at or before document start page"); + Assert.IsTrue(maxSegmentPage >= documentContent.EndPageNumber, + "Segments should end at or after document end page"); + Console.WriteLine($"Segments cover page range [{minSegmentPage}, {maxSegmentPage}]"); + } + else + { + Console.WriteLine("āš ļø No segments found in document content (unexpected with EnableSegment=true)"); + } + + Console.WriteLine("All category analysis with segmentation properties validated successfully"); + #endregion + } + finally + { + // Clean up: delete the classifier + try + { + await client.DeleteAnalyzerAsync(analyzerId); + } + catch + { + // Ignore cleanup errors in tests + } + } + } + + [RecordedTest] + public async Task AnalyzeCategoryWithSegmentsAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + // First create a classifier with segmentation + string defaultId = $"test_classifier_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("analyzerId_with_segment", defaultId) ?? defaultId; + var config = new ContentAnalyzerConfig + { + ReturnDetails = true, + EnableSegment = true // Enable automatic segmentation + }; + config.ContentCategories.Add("Invoice", new ContentCategory + { + Description = "Billing documents issued by sellers or service providers to request payment for goods or services." + }); + + var classifier = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Custom classifier for financial document categorization with automatic segmentation", + Config = config + }; + classifier.Models.Add("completion", "gpt-4.1"); + + await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + classifier); + + try + { + #region Snippet:ContentUnderstandingAnalyzeCategoryWithSegments +#if SNIPPET + // Analyze a document (EnableSegment=true automatically segments by category) + string filePath = ""; + byte[] fileBytes = File.ReadAllBytes(filePath); + AnalyzeResultOperation analyzeOperation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + analyzerId, + "application/pdf", + BinaryData.FromBytes(fileBytes)); +#else + // Analyze a document (EnableSegment=true automatically segments by category) + var filePath = ContentUnderstandingClientTestEnvironment.CreatePath("mixed_financial_docs.pdf"); + var fileBytes = File.ReadAllBytes(filePath); + AnalyzeResultOperation analyzeOperation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + analyzerId, + "application/pdf", + BinaryData.FromBytes(fileBytes)); +#endif + + var analyzeResult = analyzeOperation.Value; + + // Display classification results with automatic segmentation + if (analyzeResult.Contents?.FirstOrDefault() is DocumentContent docContent) + { + if (docContent.Segments != null && docContent.Segments.Count > 0) + { + Console.WriteLine($"Found {docContent.Segments.Count} segment(s):"); + foreach (var segment in docContent.Segments) + { + Console.WriteLine($" Category: {segment.Category ?? "(unknown)"}"); + Console.WriteLine($" Pages: {segment.StartPageNumber}-{segment.EndPageNumber}"); + Console.WriteLine($" Segment ID: {segment.SegmentId ?? 
"(not available)"}"); + } + } + } + #endregion + + #region Assertion:ContentUnderstandingAnalyzeCategoryWithSegments + Assert.IsTrue(File.Exists(filePath), $"Sample file not found at {filePath}"); + Assert.IsNotNull(analyzeOperation, "Analyze operation with segmentation should not be null"); + Assert.IsNotNull(analyzeOperation.GetRawResponse(), "Analyze operation with segmentation should have a raw response"); + Console.WriteLine("Analyze operation with segmentation properties verified"); + Assert.IsNotNull(analyzeResult, "Analyze result should not be null"); + Assert.IsNotNull(analyzeResult.Contents, "Result should contain contents"); + Assert.IsTrue(analyzeResult.Contents!.Count > 0, "Result should have at least one content"); + + var documentContent = analyzeResult.Contents?.FirstOrDefault() as DocumentContent; + Assert.IsNotNull(documentContent, "Content should be DocumentContent"); + + // With EnableSegment=true, we expect automatic segmentation + if (documentContent!.Segments != null && documentContent.Segments.Count > 0) + { + Assert.IsTrue(documentContent.Segments.Count >= 1, + "Should have at least one segment with EnableSegment=true"); + + foreach (var segment in documentContent.Segments) + { + Assert.IsTrue(segment.StartPageNumber >= 1, + "Segment start page should be >= 1"); + Assert.IsTrue(segment.EndPageNumber >= segment.StartPageNumber, + "Segment end page should be >= start page"); + Assert.IsTrue(segment.StartPageNumber >= documentContent.StartPageNumber && + segment.EndPageNumber <= documentContent.EndPageNumber, + "Segment page range should be within document page range"); + + // SegmentId may or may not be available depending on the service response + // Category may be null or unknown for some segments + } + } + #endregion + } + finally + { + // Clean up: delete the classifier + try + { + await client.DeleteAnalyzerAsync(analyzerId); + } + catch + { + // Ignore cleanup errors in tests + } + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample06_GetAnalyzer.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample06_GetAnalyzer.cs new file mode 100644 index 000000000000..6f3ff0c2cda3 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample06_GetAnalyzer.cs @@ -0,0 +1,434 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +#nullable enable + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task GetPrebuiltAnalyzerAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingGetPrebuiltAnalyzer +#if SNIPPET + // Get information about a prebuilt analyzer + var response = await client.GetAnalyzerAsync("prebuilt-documentSearch"); +#else + // Get information about a prebuilt analyzer + var response = await client.GetAnalyzerAsync("prebuilt-documentSearch"); +#endif + ContentAnalyzer analyzer = response.Value; + + // Display full analyzer JSON + var jsonOptions = new JsonSerializerOptions + { + WriteIndented = true, + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull + }; + string analyzerJson = JsonSerializer.Serialize(analyzer, jsonOptions); + Console.WriteLine("Prebuilt-documentSearch Analyzer:"); + Console.WriteLine(analyzerJson); + #endregion + + #region Assertion:ContentUnderstandingGetPrebuiltAnalyzer + Assert.IsNotNull(response, "Response should not be null"); + Assert.IsTrue(response.HasValue, "Response should have a value"); + Assert.IsNotNull(analyzer, "Analyzer should not be null"); + Console.WriteLine("Get prebuilt analyzer response verified"); + + // Verify raw response + var rawResponse = response.GetRawResponse(); + Assert.IsNotNull(rawResponse, "Raw response should not be null"); + Assert.AreEqual(200, rawResponse.Status, "Response status should be 200"); + Assert.IsNotNull(rawResponse.Content, "Response content should not be null"); + Console.WriteLine($"Raw response status: {rawResponse.Status}"); + + // Verify analyzer can be serialized to JSON + Assert.IsNotNull(analyzerJson, "Analyzer JSON should not be null"); + Assert.IsTrue(analyzerJson.Length > 0, "Analyzer JSON should not be empty"); + Assert.IsTrue(analyzerJson.Contains("prebuilt-documentSearch") || analyzerJson.Contains("documentSearch"), + "Analyzer JSON should contain analyzer identifier"); + Console.WriteLine($"Analyzer JSON length: {analyzerJson.Length} characters"); + + // Verify basic analyzer properties for prebuilt-documentSearch + if (!string.IsNullOrWhiteSpace(analyzer.BaseAnalyzerId)) + { + Console.WriteLine($"Base analyzer ID: {analyzer.BaseAnalyzerId}"); + } + + if (!string.IsNullOrWhiteSpace(analyzer.Description)) + { + Console.WriteLine($"Description: {analyzer.Description}"); + } + + // Verify config if present + if (analyzer.Config != null) + { + Console.WriteLine("Analyzer has configuration"); + if (analyzer.Config.EnableOcr.HasValue) + { + Console.WriteLine($" EnableOcr: {analyzer.Config.EnableOcr.Value}"); + } + if (analyzer.Config.EnableLayout.HasValue) + { + Console.WriteLine($" EnableLayout: {analyzer.Config.EnableLayout.Value}"); + } + } + + // Verify models if present + if (analyzer.Models != null && analyzer.Models.Count > 0) + { + Console.WriteLine($"Analyzer has {analyzer.Models.Count} model mapping(s)"); + foreach (var 
model in analyzer.Models) + { + Console.WriteLine($" {model.Key}: {model.Value}"); + } + } + + Console.WriteLine("All prebuilt analyzer properties validated successfully"); + #endregion + } + + [RecordedTest] + public async Task GetPrebuiltInvoiceAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingGetPrebuiltInvoice +#if SNIPPET + // Get information about prebuilt-invoice analyzer + var invoiceResponse = await client.GetAnalyzerAsync("prebuilt-invoice"); +#else + // Get information about prebuilt-invoice analyzer + var invoiceResponse = await client.GetAnalyzerAsync("prebuilt-invoice"); +#endif + ContentAnalyzer invoiceAnalyzer = invoiceResponse.Value; + + // Display full analyzer JSON + var jsonOptions = new JsonSerializerOptions + { + WriteIndented = true, + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull + }; + string invoiceAnalyzerJson = JsonSerializer.Serialize(invoiceAnalyzer, jsonOptions); + Console.WriteLine("Prebuilt-invoice Analyzer:"); + Console.WriteLine(invoiceAnalyzerJson); + #endregion + + #region Assertion:ContentUnderstandingGetPrebuiltInvoice + Assert.IsNotNull(invoiceResponse, "Response should not be null"); + Assert.IsTrue(invoiceResponse.HasValue, "Response should have a value"); + Assert.IsNotNull(invoiceAnalyzer, "Invoice analyzer should not be null"); + Console.WriteLine("Get prebuilt invoice analyzer response verified"); + + // Verify raw response + var rawResponse = invoiceResponse.GetRawResponse(); + Assert.IsNotNull(rawResponse, "Raw response should not be null"); + Assert.AreEqual(200, rawResponse.Status, "Response status should be 200"); + Assert.IsNotNull(rawResponse.Content, "Response content should not be null"); + Console.WriteLine($"Raw response status: {rawResponse.Status}"); + + // Verify analyzer can be serialized to JSON + Assert.IsNotNull(invoiceAnalyzerJson, "Invoice analyzer JSON should not be null"); + Assert.IsTrue(invoiceAnalyzerJson.Length > 0, "Invoice analyzer JSON should not be empty"); + Assert.IsTrue(invoiceAnalyzerJson.Contains("invoice") || invoiceAnalyzerJson.Contains("Invoice"), + "Invoice analyzer JSON should contain 'invoice'"); + Console.WriteLine($"Invoice analyzer JSON length: {invoiceAnalyzerJson.Length} characters"); + + // Verify invoice analyzer has field schema (prebuilt-invoice should have predefined fields) + Assert.IsNotNull(invoiceAnalyzer.FieldSchema, "Invoice analyzer should have field schema"); + Assert.IsNotNull(invoiceAnalyzer.FieldSchema!.Fields, "Invoice analyzer should have fields"); + Assert.IsTrue(invoiceAnalyzer.FieldSchema.Fields.Count > 0, + "Invoice analyzer should have at least one field"); + Console.WriteLine($"Invoice analyzer has {invoiceAnalyzer.FieldSchema.Fields.Count} field(s)"); + + // Verify common invoice fields + var commonFields = new[] { "CustomerName", "InvoiceDate", "TotalAmount", "LineItems" }; + int foundFields = 0; + foreach (var fieldName in commonFields) + { + if (invoiceAnalyzer.FieldSchema.Fields.ContainsKey(fieldName)) + { + foundFields++; + var field = invoiceAnalyzer.FieldSchema.Fields[fieldName]; + Console.WriteLine($" {fieldName} field found (Type: {field.Type})"); + + Assert.IsFalse(string.IsNullOrWhiteSpace(field.Description), + $"{fieldName} should have a description"); + } + } + + if 
(foundFields > 0) + { + Console.WriteLine($"Found {foundFields} common invoice fields"); + } + else + { + Console.WriteLine("āš ļø No common invoice fields found (field names may differ)"); + } + + // Verify field schema metadata + if (!string.IsNullOrWhiteSpace(invoiceAnalyzer.FieldSchema.Name)) + { + Console.WriteLine($"Field schema name: {invoiceAnalyzer.FieldSchema.Name}"); + } + + if (!string.IsNullOrWhiteSpace(invoiceAnalyzer.FieldSchema.Description)) + { + Console.WriteLine($"Field schema description: {invoiceAnalyzer.FieldSchema.Description}"); + } + + // Verify base analyzer ID + if (!string.IsNullOrWhiteSpace(invoiceAnalyzer.BaseAnalyzerId)) + { + Console.WriteLine($"Base analyzer ID: {invoiceAnalyzer.BaseAnalyzerId}"); + } + + // Verify description + if (!string.IsNullOrWhiteSpace(invoiceAnalyzer.Description)) + { + Console.WriteLine($"Description: {invoiceAnalyzer.Description}"); + } + + // Verify config + if (invoiceAnalyzer.Config != null) + { + Console.WriteLine("Invoice analyzer has configuration"); + if (invoiceAnalyzer.Config.EnableOcr.HasValue) + { + Console.WriteLine($" EnableOcr: {invoiceAnalyzer.Config.EnableOcr.Value}"); + } + if (invoiceAnalyzer.Config.EnableLayout.HasValue) + { + Console.WriteLine($" EnableLayout: {invoiceAnalyzer.Config.EnableLayout.Value}"); + } + if (invoiceAnalyzer.Config.EstimateFieldSourceAndConfidence.HasValue) + { + Console.WriteLine($" EstimateFieldSourceAndConfidence: {invoiceAnalyzer.Config.EstimateFieldSourceAndConfidence.Value}"); + } + } + + // Verify models + if (invoiceAnalyzer.Models != null && invoiceAnalyzer.Models.Count > 0) + { + Console.WriteLine($"Invoice analyzer has {invoiceAnalyzer.Models.Count} model mapping(s)"); + foreach (var model in invoiceAnalyzer.Models) + { + Console.WriteLine($" {model.Key}: {model.Value}"); + } + } + + Console.WriteLine("All prebuilt invoice analyzer properties validated successfully"); + #endregion + } + + [RecordedTest] + public async Task GetCustomAnalyzerAsync() + { + #region Snippet:ContentUnderstandingGetCustomAnalyzer +#if SNIPPET + string endpoint = ""; + string apiKey = ""; // Set to null to use DefaultAzureCredential + var client = !string.IsNullOrEmpty(apiKey) + ? new ContentUnderstandingClient(new Uri(endpoint), new AzureKeyCredential(apiKey)) + : new ContentUnderstandingClient(new Uri(endpoint), new DefaultAzureCredential()); + + // Generate a unique analyzer ID + string analyzerId = $"my_custom_analyzer_{DateTimeOffset.UtcNow.ToUnixTimeSeconds()}"; +#else + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + // First, create a custom analyzer + string defaultId = $"test_custom_analyzer_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("analyzerId", defaultId) ?? 
defaultId; +#endif + + // Define field schema with custom fields + var fieldSchema = new ContentFieldSchema( + new Dictionary<string, ContentFieldDefinition> + { + ["company_name"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Extract, + Description = "Name of the company" + } + }) + { + Name = "test_schema", + Description = "Test schema for GetAnalyzer sample" + }; + + // Create analyzer configuration + var config = new ContentAnalyzerConfig + { + ReturnDetails = true + }; + + // Create the custom analyzer + var analyzer = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Test analyzer for GetAnalyzer sample", + Config = config, + FieldSchema = fieldSchema + }; + analyzer.Models.Add("completion", "gpt-4.1"); + + // Create the analyzer + await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + analyzer); + + try + { + // Get information about the custom analyzer + var response = await client.GetAnalyzerAsync(analyzerId); + ContentAnalyzer retrievedAnalyzer = response.Value; + + // Display full analyzer JSON + var jsonOptions = new JsonSerializerOptions + { + WriteIndented = true, + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull + }; + string analyzerJson = JsonSerializer.Serialize(retrievedAnalyzer, jsonOptions); + Console.WriteLine("Custom Analyzer:"); + Console.WriteLine(analyzerJson); + #endregion + + #region Assertion:ContentUnderstandingGetCustomAnalyzer + Assert.IsNotNull(response, "Response should not be null"); + Assert.IsTrue(response.HasValue, "Response should have a value"); + Assert.IsNotNull(retrievedAnalyzer, "Retrieved analyzer should not be null"); + Console.WriteLine($"Get custom analyzer response verified for '{analyzerId}'"); + + // Verify raw response + var rawResponse = response.GetRawResponse(); + Assert.IsNotNull(rawResponse, "Raw response should not be null"); + Assert.AreEqual(200, rawResponse.Status, "Response status should be 200"); + Assert.IsNotNull(rawResponse.Content, "Response content should not be null"); + Console.WriteLine($"Raw response status: {rawResponse.Status}"); + + // Verify analyzer can be serialized to JSON + Assert.IsNotNull(analyzerJson, "Analyzer JSON should not be null"); + Assert.IsTrue(analyzerJson.Length > 0, "Analyzer JSON should not be empty"); + Console.WriteLine($"Analyzer JSON length: {analyzerJson.Length} characters"); + + // Verify the analyzer properties match what we created + Assert.IsNotNull(retrievedAnalyzer.BaseAnalyzerId, "Base analyzer ID should not be null"); + Assert.AreEqual("prebuilt-document", retrievedAnalyzer.BaseAnalyzerId, + "Base analyzer ID should match"); + Console.WriteLine($"Base analyzer ID verified: {retrievedAnalyzer.BaseAnalyzerId}"); + + Assert.IsNotNull(retrievedAnalyzer.Description, "Description should not be null"); + Assert.AreEqual("Test analyzer for GetAnalyzer sample", retrievedAnalyzer.Description, + "Description should match"); + Console.WriteLine($"Description verified: {retrievedAnalyzer.Description}"); + + // Verify field schema + Assert.IsNotNull(retrievedAnalyzer.FieldSchema, "Field schema should not be null"); + Assert.IsNotNull(retrievedAnalyzer.FieldSchema!.Name, "Schema name should not be null"); + Assert.AreEqual("test_schema", retrievedAnalyzer.FieldSchema.Name, + "Schema name should match"); + Console.WriteLine($"Field schema name verified: {retrievedAnalyzer.FieldSchema.Name}"); + + Assert.IsNotNull(retrievedAnalyzer.FieldSchema.Description, "Schema description should not be 
null"); + Assert.AreEqual("Test schema for GetAnalyzer sample", retrievedAnalyzer.FieldSchema.Description, + "Schema description should match"); + Console.WriteLine($"Field schema description verified"); + + Assert.IsNotNull(retrievedAnalyzer.FieldSchema.Fields, "Fields should not be null"); + Assert.AreEqual(1, retrievedAnalyzer.FieldSchema.Fields.Count, + "Should have 1 custom field"); + Console.WriteLine($"Field count verified: {retrievedAnalyzer.FieldSchema.Fields.Count}"); + + Assert.IsTrue(retrievedAnalyzer.FieldSchema.Fields.ContainsKey("company_name"), + "Should contain company_name field"); + Console.WriteLine("company_name field found"); + + // Verify field definition in detail + var companyNameField = retrievedAnalyzer.FieldSchema.Fields["company_name"]; + Assert.IsNotNull(companyNameField, "company_name field should not be null"); + Assert.AreEqual(ContentFieldType.String, companyNameField.Type, + "Field type should be String"); + Console.WriteLine($" Type: {companyNameField.Type}"); + + Assert.AreEqual(GenerationMethod.Extract, companyNameField.Method, + "Field method should be Extract"); + Console.WriteLine($" Method: {companyNameField.Method}"); + + Assert.IsNotNull(companyNameField.Description, "Field description should not be null"); + Assert.AreEqual("Name of the company", companyNameField.Description, + "Field description should match"); + Console.WriteLine($" Description: {companyNameField.Description}"); + + // Verify config + Assert.IsNotNull(retrievedAnalyzer.Config, "Config should not be null"); + Assert.IsNotNull(retrievedAnalyzer.Config!.ReturnDetails, "ReturnDetails should not be null"); + Assert.AreEqual(true, retrievedAnalyzer.Config.ReturnDetails, + "ReturnDetails should be true"); + Console.WriteLine($"Config verified (ReturnDetails={retrievedAnalyzer.Config.ReturnDetails})"); + + // Verify models + Assert.IsNotNull(retrievedAnalyzer.Models, "Models should not be null"); + Assert.IsTrue(retrievedAnalyzer.Models.Count >= 1, + "Should have at least 1 model mapping"); + Console.WriteLine($"Model mappings count: {retrievedAnalyzer.Models.Count}"); + + Assert.IsTrue(retrievedAnalyzer.Models.ContainsKey("completion"), + "Should contain completion model"); + var completionModel = retrievedAnalyzer.Models["completion"]; + Assert.AreEqual("gpt-4.1", completionModel, "Completion model should be gpt-4.1"); + Console.WriteLine($" completion: {completionModel}"); + + // Verify the retrieved analyzer matches the original + Console.WriteLine("Retrieved analyzer matches original configuration:"); + Console.WriteLine($" - Base analyzer: {retrievedAnalyzer.BaseAnalyzerId}"); + Console.WriteLine($" - Description: {retrievedAnalyzer.Description}"); + Console.WriteLine($" - Field schema: {retrievedAnalyzer.FieldSchema.Name}"); + Console.WriteLine($" - Fields: {retrievedAnalyzer.FieldSchema.Fields.Count}"); + Console.WriteLine($" - Models: {retrievedAnalyzer.Models.Count}"); + + Console.WriteLine("All custom analyzer properties validated successfully"); + #endregion + } + finally + { + // Clean up: delete the analyzer + try + { + await client.DeleteAnalyzerAsync(analyzerId); + Console.WriteLine($"Analyzer '{analyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors in tests + } + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample07_ListAnalyzers.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample07_ListAnalyzers.cs new file mode 100644 index 000000000000..2aefb4c98472 --- 
/dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample07_ListAnalyzers.cs @@ -0,0 +1,244 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable enable + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task ListAnalyzersAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingListAnalyzers +#if SNIPPET + // List all analyzers + var analyzers = new List<ContentAnalyzer>(); + await foreach (var analyzer in client.GetAnalyzersAsync()) + { + analyzers.Add(analyzer); + } + + Console.WriteLine($"Found {analyzers.Count} analyzer(s)"); + + // Display summary + var prebuiltCount = analyzers.Count(a => a.AnalyzerId?.StartsWith("prebuilt-") == true); + var customCount = analyzers.Count(a => a.AnalyzerId?.StartsWith("prebuilt-") != true); + Console.WriteLine($" Prebuilt analyzers: {prebuiltCount}"); + Console.WriteLine($" Custom analyzers: {customCount}"); + + // Display details for each analyzer + foreach (var analyzer in analyzers) + { + Console.WriteLine($" ID: {analyzer.AnalyzerId}"); + Console.WriteLine($" Description: {analyzer.Description ?? "(none)"}"); + Console.WriteLine($" Status: {analyzer.Status}"); + + if (analyzer.AnalyzerId?.StartsWith("prebuilt-") == true) + { + Console.WriteLine(" Type: Prebuilt analyzer"); + } + else + { + Console.WriteLine(" Type: Custom analyzer"); + } + } +#else + // List all analyzers + var analyzers = new List<ContentAnalyzer>(); + await foreach (var analyzer in client.GetAnalyzersAsync()) + { + analyzers.Add(analyzer); + } + + Console.WriteLine($"Found {analyzers.Count} analyzer(s)"); + + // Display summary + var prebuiltCount = analyzers.Count(a => a.AnalyzerId?.StartsWith("prebuilt-") == true); + var customCount = analyzers.Count(a => a.AnalyzerId?.StartsWith("prebuilt-") != true); + Console.WriteLine($" Prebuilt analyzers: {prebuiltCount}"); + Console.WriteLine($" Custom analyzers: {customCount}"); + + // Display details for each analyzer (limit to first 10 for test output) + foreach (var analyzer in analyzers.Take(10)) + { + Console.WriteLine($" ID: {analyzer.AnalyzerId}"); + Console.WriteLine($" Description: {analyzer.Description ?? 
"(none)"}"); + Console.WriteLine($" Status: {analyzer.Status}"); + + if (analyzer.AnalyzerId?.StartsWith("prebuilt-") == true) + { + Console.WriteLine(" Type: Prebuilt analyzer"); + } + else + { + Console.WriteLine(" Type: Custom analyzer"); + } + } +#endif + #endregion + + #region Assertion:ContentUnderstandingListAnalyzers + Assert.IsNotNull(analyzers, "Analyzers list should not be null"); + Assert.IsTrue(analyzers.Count > 0, "Should have at least one analyzer"); + Console.WriteLine($"Found {analyzers.Count} analyzer(s)"); + + // Verify counts + Assert.IsTrue(prebuiltCount >= 0, "Prebuilt count should be >= 0"); + Assert.IsTrue(customCount >= 0, "Custom count should be >= 0"); + Assert.AreEqual(analyzers.Count, prebuiltCount + customCount, + "Total count should equal prebuilt + custom count"); + Console.WriteLine($"Count breakdown: {prebuiltCount} prebuilt, {customCount} custom"); + + // Verify prebuilt analyzers exist (there should always be some prebuilt analyzers) + Assert.IsTrue(prebuiltCount > 0, "Should have at least one prebuilt analyzer"); + Console.WriteLine($"Prebuilt analyzers present: {prebuiltCount}"); + + // Verify each analyzer has required properties + int validAnalyzers = 0; + int analyzersWithDescription = 0; + + foreach (var analyzer in analyzers) + { + Assert.IsNotNull(analyzer, "Analyzer should not be null"); + Assert.IsNotNull(analyzer.AnalyzerId, "Analyzer ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(analyzer.AnalyzerId), + $"Analyzer ID should not be empty or whitespace"); + + validAnalyzers++; + + // Track optional properties + if (!string.IsNullOrWhiteSpace(analyzer.Description)) + { + analyzersWithDescription++; + } + + // Verify analyzer ID format (should not contain spaces or special characters) + Assert.IsFalse(analyzer.AnalyzerId.Contains(" "), + $"Analyzer ID should not contain spaces: {analyzer.AnalyzerId}"); + } + + Assert.AreEqual(analyzers.Count, validAnalyzers, "All analyzers should have valid IDs"); + Console.WriteLine($"All {validAnalyzers} analyzers have valid IDs"); + Console.WriteLine($" Analyzers with description: {analyzersWithDescription}"); + + // Verify common prebuilt analyzers exist + var analyzerIds = new List(); + foreach (var analyzer in analyzers) + { + if (analyzer.AnalyzerId != null) + { + analyzerIds.Add(analyzer.AnalyzerId); + } + } + var commonPrebuiltAnalyzers = new[] + { + "prebuilt-document", + "prebuilt-documentSearch", + "prebuilt-invoice" + }; + + int foundCommonAnalyzers = 0; + foreach (var prebuiltId in commonPrebuiltAnalyzers) + { + if (analyzerIds.Contains(prebuiltId)) + { + foundCommonAnalyzers++; + Console.WriteLine($" Found common analyzer: {prebuiltId}"); + } + else + { + Console.WriteLine($" āš ļø Common analyzer not found: {prebuiltId}"); + } + + Assert.IsTrue(analyzerIds.Contains(prebuiltId), + $"Should contain common prebuilt analyzer: {prebuiltId}"); + } + + Assert.AreEqual(commonPrebuiltAnalyzers.Length, foundCommonAnalyzers, + "All common prebuilt analyzers should be present"); + Console.WriteLine($"All {foundCommonAnalyzers} common prebuilt analyzers verified"); + + // Verify prebuilt analyzer naming convention + var prebuiltAnalyzers = analyzers.Where(a => a.AnalyzerId?.StartsWith("prebuilt-") == true).ToList(); + Assert.AreEqual(prebuiltCount, prebuiltAnalyzers.Count, + "Prebuilt count should match filtered list"); + + foreach (var prebuilt in prebuiltAnalyzers) + { + Assert.IsTrue(prebuilt.AnalyzerId!.StartsWith("prebuilt-"), + $"Prebuilt analyzer ID should start with 'prebuilt-': 
{prebuilt.AnalyzerId}"); + + // Verify prebuilt analyzer ID format (should be lowercase with hyphens) + Assert.IsFalse(prebuilt.AnalyzerId.Contains(" "), + $"Prebuilt analyzer ID should not contain spaces: {prebuilt.AnalyzerId}"); + Assert.IsFalse(prebuilt.AnalyzerId.Contains("_"), + $"Prebuilt analyzer ID should use hyphens, not underscores: {prebuilt.AnalyzerId}"); + } + Console.WriteLine($"All {prebuiltAnalyzers.Count} prebuilt analyzers follow naming convention"); + + // Verify custom analyzers (if any) + var customAnalyzers = analyzers.Where(a => a.AnalyzerId?.StartsWith("prebuilt-") != true).ToList(); + Assert.AreEqual(customCount, customAnalyzers.Count, + "Custom count should match filtered list"); + + if (customAnalyzers.Count > 0) + { + Console.WriteLine($"Found {customAnalyzers.Count} custom analyzer(s):"); + foreach (var custom in customAnalyzers.Take(5)) // Show first 5 custom analyzers + { + Console.WriteLine($" - {custom.AnalyzerId}"); + if (!string.IsNullOrWhiteSpace(custom.Description)) + { + Console.WriteLine($" Description: {custom.Description}"); + } + } + if (customAnalyzers.Count > 5) + { + Console.WriteLine($" ... and {customAnalyzers.Count - 5} more"); + } + } + else + { + Console.WriteLine("No custom analyzers found"); + } + + // Verify no duplicate analyzer IDs + var duplicateIds = analyzerIds + .GroupBy(id => id) + .Where(g => g.Count() > 1) + .Select(g => g.Key) + .ToList(); + + Assert.AreEqual(0, duplicateIds.Count, + $"Should not have duplicate analyzer IDs: {string.Join(", ", duplicateIds)}"); + Assert.AreEqual(analyzers.Count, analyzerIds.Count, + "Number of unique analyzer IDs should match total count"); + Console.WriteLine($"All analyzer IDs are unique"); + + // Summary statistics + Console.WriteLine($"\nVerification completed successfully:"); + Console.WriteLine($" Total analyzers: {analyzers.Count}"); + Console.WriteLine($" Prebuilt: {prebuiltCount} ({(double)prebuiltCount / analyzers.Count * 100:F1}%)"); + Console.WriteLine($" Custom: {customCount} ({(double)customCount / analyzers.Count * 100:F1}%)"); + Console.WriteLine($" With description: {analyzersWithDescription} ({(double)analyzersWithDescription / analyzers.Count * 100:F1}%)"); + Console.WriteLine($" Common prebuilt analyzers: {foundCommonAnalyzers}/{commonPrebuiltAnalyzers.Length}"); + #endregion + } + } +} \ No newline at end of file diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample08_UpdateAnalyzer.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample08_UpdateAnalyzer.cs new file mode 100644 index 000000000000..7c6b1d2aa556 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample08_UpdateAnalyzer.cs @@ -0,0 +1,309 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +#nullable enable + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task UpdateAnalyzerAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + // First create an analyzer to update + string defaultId = $"test_analyzer_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("updateAnalyzerId", defaultId) ?? defaultId; + + var initialAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Initial description", + Config = new ContentAnalyzerConfig + { + ReturnDetails = true + } + }; + initialAnalyzer.Models.Add("completion", "gpt-4.1"); + initialAnalyzer.Tags["tag1"] = "tag1_initial_value"; + initialAnalyzer.Tags["tag2"] = "tag2_initial_value"; + + await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + initialAnalyzer, + allowReplace: true); + + try + { + #region Snippet:ContentUnderstandingUpdateAnalyzer +#if SNIPPET + // First, get the current analyzer to preserve base analyzer ID + var currentAnalyzer = await client.GetAnalyzerAsync(analyzerId); + + // Display current analyzer information + Console.WriteLine("Current analyzer information:"); + Console.WriteLine($" Description: {currentAnalyzer.Value.Description}"); + Console.WriteLine($" Tags: {string.Join(", ", currentAnalyzer.Value.Tags.Select(kvp => $"{kvp.Key}={kvp.Value}"))}"); + + // Create an updated analyzer with new description and tags + var updatedAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = currentAnalyzer.Value.BaseAnalyzerId, + Description = "Updated description" + }; + + // Update tags (empty string removes a tag) + updatedAnalyzer.Tags["tag1"] = "tag1_updated_value"; + updatedAnalyzer.Tags["tag2"] = ""; // Remove tag2 + updatedAnalyzer.Tags["tag3"] = "tag3_value"; // Add tag3 + + // Update the analyzer + await client.UpdateAnalyzerAsync(analyzerId, updatedAnalyzer); + + // Verify the update + var updated = await client.GetAnalyzerAsync(analyzerId); + Console.WriteLine($"Description: {updated.Value.Description}"); + Console.WriteLine($"Tags: {string.Join(", ", updated.Value.Tags.Select(kvp => $"{kvp.Key}={kvp.Value}"))}"); +#else + // First, get the current analyzer to preserve base analyzer ID + var currentAnalyzer = await client.GetAnalyzerAsync(analyzerId); + + // Display current analyzer information + Console.WriteLine("Current analyzer information:"); + Console.WriteLine($" Description: {currentAnalyzer.Value.Description}"); + Console.WriteLine($" Tags: {string.Join(", ", currentAnalyzer.Value.Tags.Select(kvp => $"{kvp.Key}={kvp.Value}"))}"); + + // Create an updated analyzer with new description and tags + var updatedAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = currentAnalyzer.Value.BaseAnalyzerId, + Description = "Updated description" + }; + + // Update tags (empty string removes a tag) + updatedAnalyzer.Tags["tag1"] = "tag1_updated_value"; + updatedAnalyzer.Tags["tag2"] = ""; // Remove tag2 + updatedAnalyzer.Tags["tag3"] = "tag3_value"; // Add tag3 
+ + // Update the analyzer + await client.UpdateAnalyzerAsync(analyzerId, updatedAnalyzer); + + // Verify the update + var updated = await client.GetAnalyzerAsync(analyzerId); + Console.WriteLine($"Description: {updated.Value.Description}"); + Console.WriteLine($"Tags: {string.Join(", ", updated.Value.Tags.Select(kvp => $"{kvp.Key}={kvp.Value}"))}"); +#endif + #endregion + + #region Assertion:ContentUnderstandingUpdateAnalyzer + // ========== Verify Initial Analyzer Retrieval ========== + Assert.IsNotNull(currentAnalyzer, "Current analyzer response should not be null"); + Assert.IsTrue(currentAnalyzer.HasValue, "Current analyzer response should have a value"); + Assert.IsNotNull(currentAnalyzer.Value, "Current analyzer value should not be null"); + Console.WriteLine("Initial analyzer retrieved successfully"); + + // Verify raw response + var currentRawResponse = currentAnalyzer.GetRawResponse(); + Assert.IsNotNull(currentRawResponse, "Current analyzer raw response should not be null"); + Assert.AreEqual(200, currentRawResponse.Status, "Response status should be 200"); + Console.WriteLine($"Get current analyzer response status: {currentRawResponse.Status}"); + + // Verify initial description + Assert.IsNotNull(currentAnalyzer.Value.Description, "Initial description should not be null"); + Assert.AreEqual("Initial description", currentAnalyzer.Value.Description, + "Initial description should match"); + Console.WriteLine($"Initial description verified: '{currentAnalyzer.Value.Description}'"); + + // Verify initial base analyzer ID + Assert.IsNotNull(currentAnalyzer.Value.BaseAnalyzerId, "Base analyzer ID should not be null"); + Assert.AreEqual("prebuilt-document", currentAnalyzer.Value.BaseAnalyzerId, + "Base analyzer ID should match"); + Console.WriteLine($"Base analyzer ID verified: {currentAnalyzer.Value.BaseAnalyzerId}"); + + // Verify initial tags + Assert.IsNotNull(currentAnalyzer.Value.Tags, "Initial tags should not be null"); + Assert.AreEqual(2, currentAnalyzer.Value.Tags.Count, + "Should have 2 initial tags"); + Console.WriteLine($"Initial tags count: {currentAnalyzer.Value.Tags.Count}"); + + Assert.IsTrue(currentAnalyzer.Value.Tags.ContainsKey("tag1"), + "Should contain tag1"); + Assert.AreEqual("tag1_initial_value", currentAnalyzer.Value.Tags["tag1"], + "tag1 initial value should match"); + Console.WriteLine($" tag1 = '{currentAnalyzer.Value.Tags["tag1"]}'"); + + Assert.IsTrue(currentAnalyzer.Value.Tags.ContainsKey("tag2"), + "Should contain tag2"); + Assert.AreEqual("tag2_initial_value", currentAnalyzer.Value.Tags["tag2"], + "tag2 initial value should match"); + Console.WriteLine($" tag2 = '{currentAnalyzer.Value.Tags["tag2"]}'"); + + // ========== Verify Update Operation ========== + Assert.IsNotNull(updatedAnalyzer, "Updated analyzer object should not be null"); + Assert.AreEqual(currentAnalyzer.Value.BaseAnalyzerId, updatedAnalyzer.BaseAnalyzerId, + "Updated analyzer should preserve base analyzer ID"); + Assert.AreEqual("Updated description", updatedAnalyzer.Description, + "Updated analyzer should have new description"); + Console.WriteLine("Update analyzer object created with correct properties"); + + // ========== Verify Updated Analyzer Retrieval ========== + Assert.IsNotNull(updated, "Updated analyzer response should not be null"); + Assert.IsTrue(updated.HasValue, "Updated analyzer response should have a value"); + Assert.IsNotNull(updated.Value, "Updated analyzer value should not be null"); + Console.WriteLine("Updated analyzer retrieved successfully"); + + // Verify raw 
response + var updatedRawResponse = updated.GetRawResponse(); + Assert.IsNotNull(updatedRawResponse, "Updated analyzer raw response should not be null"); + Assert.AreEqual(200, updatedRawResponse.Status, "Response status should be 200"); + Console.WriteLine($"Get updated analyzer response status: {updatedRawResponse.Status}"); + + // ========== Verify Description Update ========== + Assert.IsNotNull(updated.Value.Description, "Updated description should not be null"); + Assert.AreEqual("Updated description", updated.Value.Description, + "Description should be updated"); + Assert.AreNotEqual(currentAnalyzer.Value.Description, updated.Value.Description, + "Description should be different from initial value"); + Console.WriteLine($"Description updated: '{currentAnalyzer.Value.Description}' → '{updated.Value.Description}'"); + + // ========== Verify Base Analyzer ID Preserved ========== + Assert.IsNotNull(updated.Value.BaseAnalyzerId, "Base analyzer ID should not be null"); + Assert.AreEqual("prebuilt-document", updated.Value.BaseAnalyzerId, + "Base analyzer ID should be preserved"); + Assert.AreEqual(currentAnalyzer.Value.BaseAnalyzerId, updated.Value.BaseAnalyzerId, + "Base analyzer ID should remain unchanged"); + Console.WriteLine($"Base analyzer ID preserved: {updated.Value.BaseAnalyzerId}"); + + // ========== Verify Tags Update ========== + Assert.IsNotNull(updated.Value.Tags, "Updated tags should not be null"); + Console.WriteLine($"Updated tags count: {updated.Value.Tags.Count}"); + + // Verify tag1 was updated + Assert.IsTrue(updated.Value.Tags.ContainsKey("tag1"), + "Should still contain tag1"); + Assert.AreEqual("tag1_updated_value", updated.Value.Tags["tag1"], + "tag1 should have updated value"); + Assert.AreNotEqual(currentAnalyzer.Value.Tags["tag1"], updated.Value.Tags["tag1"], + "tag1 value should be different from initial value"); + Console.WriteLine($" tag1 updated: '{currentAnalyzer.Value.Tags["tag1"]}' → '{updated.Value.Tags["tag1"]}'"); + // Verify tag2 behavior (empty string value) + Assert.IsTrue(updated.Value.Tags.ContainsKey("tag2"), + "tag2 should still exist (empty string doesn't remove tags)"); + Assert.IsNotNull(updated.Value.Tags["tag2"], "tag2 value should not be null"); + Assert.AreEqual("", updated.Value.Tags["tag2"], + "tag2 should have empty string value"); + Assert.AreNotEqual(currentAnalyzer.Value.Tags["tag2"], updated.Value.Tags["tag2"], + "tag2 value should be different from initial value"); + Console.WriteLine($" tag2 set to empty: '{currentAnalyzer.Value.Tags["tag2"]}' → '' (empty string)"); + // Verify tag3 was added + Assert.IsTrue(updated.Value.Tags.ContainsKey("tag3"), + "Should contain new tag3"); + Assert.AreEqual("tag3_value", updated.Value.Tags["tag3"], + "tag3 should have correct value"); + Assert.IsFalse(currentAnalyzer.Value.Tags.ContainsKey("tag3"), + "tag3 should not exist in initial analyzer"); + Console.WriteLine($" tag3 added: (new) → '{updated.Value.Tags["tag3"]}'"); + + // Verify tag count (should be 3: tag1, tag2 with empty string, tag3) + Assert.AreEqual(3, updated.Value.Tags.Count, + "Should have 3 tags after update (tag1 updated, tag2 set to empty, tag3 added)"); + + // ========== Verify Config Preservation ========== + if (currentAnalyzer.Value.Config != null) + { + if (updated.Value.Config != null) + { + // Config properties should be preserved if not explicitly updated + Console.WriteLine("Config exists in updated analyzer"); + + if (currentAnalyzer.Value.Config.ReturnDetails.HasValue && + 
updated.Value.Config.ReturnDetails.HasValue) + { + Console.WriteLine($" ReturnDetails: {updated.Value.Config.ReturnDetails.Value}"); + } + } + else + { + Console.WriteLine("⚠️ Config not present in updated analyzer (may have been reset)"); + } + } + + // ========== Verify Models Preservation ========== + if (currentAnalyzer.Value.Models != null && currentAnalyzer.Value.Models.Count > 0) + { + if (updated.Value.Models != null) + { + Console.WriteLine($"Models exist in updated analyzer: {updated.Value.Models.Count} model(s)"); + + if (currentAnalyzer.Value.Models.ContainsKey("completion") && + updated.Value.Models.ContainsKey("completion")) + { + Assert.AreEqual(currentAnalyzer.Value.Models["completion"], + updated.Value.Models["completion"], + "Completion model should be preserved"); + Console.WriteLine($" completion: {updated.Value.Models["completion"]}"); + } + } + else + { + Console.WriteLine("⚠️ Models not present in updated analyzer (may have been reset)"); + } + } + + // ========== Summary ========== + Console.WriteLine("\nUpdate verification completed successfully:"); + Console.WriteLine($" Analyzer ID: {analyzerId}"); + Console.WriteLine($" Description: '{currentAnalyzer.Value.Description}' → '{updated.Value.Description}'"); + Console.WriteLine($" Base Analyzer: {updated.Value.BaseAnalyzerId} (preserved)"); + Console.WriteLine($" Tags before update: {currentAnalyzer.Value.Tags.Count} tag(s)"); + Console.WriteLine($" - tag1: '{currentAnalyzer.Value.Tags["tag1"]}'"); + Console.WriteLine($" - tag2: '{currentAnalyzer.Value.Tags["tag2"]}'"); + Console.WriteLine($" Tags after update: {updated.Value.Tags.Count} tag(s)"); + Console.WriteLine($" - tag1: '{updated.Value.Tags["tag1"]}' (updated)"); + Console.WriteLine($" - tag2: '' (set to empty)"); + Console.WriteLine($" - tag3: '{updated.Value.Tags["tag3"]}' (added)"); + + // ========== Verify Changes Summary ========== + var changedProperties = new List<string>(); + if (currentAnalyzer.Value.Description != updated.Value.Description) + changedProperties.Add("Description"); + if (currentAnalyzer.Value.Tags.Count != updated.Value.Tags.Count || + !currentAnalyzer.Value.Tags.SequenceEqual(updated.Value.Tags)) + changedProperties.Add("Tags"); + + Console.WriteLine($" Properties changed: {string.Join(", ", changedProperties)}"); + Console.WriteLine($" Properties preserved: BaseAnalyzerId" + + (updated.Value.Config != null ? ", Config" : "") + + (updated.Value.Models != null && updated.Value.Models.Count > 0 ? ", Models" : "")); + #endregion + } + finally + { + // Clean up: delete the analyzer + try + { + await client.DeleteAnalyzerAsync(analyzerId); + } + catch + { + // Ignore cleanup errors in tests + } + } + } + } +} \ No newline at end of file diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample09_DeleteAnalyzer.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample09_DeleteAnalyzer.cs new file mode 100644 index 000000000000..f443ec63531a --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample09_DeleteAnalyzer.cs @@ -0,0 +1,242 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License.
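+
+// Reviewer summary (editorial comment, not generated code): the sample below creates a
+// short-lived analyzer with CreateAnalyzerAsync(WaitUntil.Completed, ...), deletes it with
+// DeleteAnalyzerAsync(analyzerId), and then verifies the deletion two ways: GetAnalyzerAsync
+// is expected to throw RequestFailedException (404 Not Found or 400 Bad Request), and the
+// GetAnalyzersAsync listing must no longer contain the deleted analyzer ID.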
+ +#nullable enable + +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task DeleteAnalyzerAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingCreateSimpleAnalyzer + // First create a simple analyzer to delete +#if SNIPPET + // Generate a unique analyzer ID + string analyzerId = $"my_analyzer_{DateTimeOffset.UtcNow.ToUnixTimeSeconds()}"; +#else + // Generate a unique analyzer ID and record it for playback + string defaultId = $"test_analyzer_{Recording.Random.NewGuid().ToString("N")}"; + string analyzerId = Recording.GetVariable("deleteAnalyzerId", defaultId) ?? defaultId; +#endif + + // Create a simple analyzer + var analyzer = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Simple analyzer for deletion example", + Config = new ContentAnalyzerConfig + { + ReturnDetails = true + } + }; + analyzer.Models.Add("completion", "gpt-4.1"); + + await client.CreateAnalyzerAsync( + WaitUntil.Completed, + analyzerId, + analyzer, + allowReplace: true); + + Console.WriteLine($"Analyzer '{analyzerId}' created successfully."); + #endregion + + #region Assertion:ContentUnderstandingCreateSimpleAnalyzer + Assert.IsNotNull(analyzerId, "Analyzer ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(analyzerId), "Analyzer ID should not be empty"); + Console.WriteLine($"Analyzer ID generated: {analyzerId}"); + + Assert.IsNotNull(analyzer, "Analyzer object should not be null"); + Assert.AreEqual("prebuilt-document", analyzer.BaseAnalyzerId, "Base analyzer ID should match"); + Assert.AreEqual("Simple analyzer for deletion example", analyzer.Description, "Description should match"); + Assert.IsNotNull(analyzer.Config, "Config should not be null"); + Assert.IsTrue(analyzer.Config.ReturnDetails, "ReturnDetails should be true"); + Assert.IsNotNull(analyzer.Models, "Models should not be null"); + Assert.IsTrue(analyzer.Models.ContainsKey("completion"), "Should have completion model"); + Assert.AreEqual("gpt-4.1", analyzer.Models["completion"], "Completion model should be gpt-4.1"); + Console.WriteLine("Analyzer object configured correctly"); + + // Verify the analyzer was created successfully + var getResponse = await client.GetAnalyzerAsync(analyzerId); + Assert.IsNotNull(getResponse, "Get analyzer response should not be null"); + Assert.IsTrue(getResponse.HasValue, "Get analyzer response should have a value"); + Assert.IsNotNull(getResponse.Value, "Created analyzer should not be null"); + Console.WriteLine("Analyzer retrieved successfully after creation"); + + // Verify raw response + var getRawResponse = getResponse.GetRawResponse(); + Assert.IsNotNull(getRawResponse, "Raw response should not be null"); + Assert.AreEqual(200, getRawResponse.Status, "Response status should be 200"); + Console.WriteLine($"Get analyzer response status: {getRawResponse.Status}"); + + // Verify analyzer properties + Assert.IsNotNull(getResponse.Value.BaseAnalyzerId, "Base analyzer ID should not be null"); + 
Assert.AreEqual("prebuilt-document", getResponse.Value.BaseAnalyzerId, + "Base analyzer ID should match"); + Console.WriteLine($"Base analyzer ID verified: {getResponse.Value.BaseAnalyzerId}"); + + Assert.IsNotNull(getResponse.Value.Description, "Description should not be null"); + Assert.AreEqual("Simple analyzer for deletion example", getResponse.Value.Description, + "Description should match"); + Console.WriteLine($"Description verified: '{getResponse.Value.Description}'"); + + // Verify config + if (getResponse.Value.Config != null) + { + Console.WriteLine("Config exists"); + if (getResponse.Value.Config.ReturnDetails.HasValue) + { + Assert.AreEqual(true, getResponse.Value.Config.ReturnDetails.Value, + "ReturnDetails should be true"); + Console.WriteLine($" ReturnDetails: {getResponse.Value.Config.ReturnDetails.Value}"); + } + } + + // Verify models + if (getResponse.Value.Models != null) + { + Assert.IsTrue(getResponse.Value.Models.Count >= 1, "Should have at least 1 model"); + Console.WriteLine($"Models verified: {getResponse.Value.Models.Count} model(s)"); + + if (getResponse.Value.Models.ContainsKey("completion")) + { + Assert.AreEqual("gpt-4.1", getResponse.Value.Models["completion"], + "Completion model should be gpt-4.1"); + Console.WriteLine($" completion: {getResponse.Value.Models["completion"]}"); + } + } + + Console.WriteLine($"Verified analyzer '{analyzerId}' exists and is correctly configured before deletion"); + #endregion + + #region Snippet:ContentUnderstandingDeleteAnalyzer + #if SNIPPET + // Delete an analyzer + await client.DeleteAnalyzerAsync(analyzerId); + Console.WriteLine($"Analyzer '{analyzerId}' deleted successfully."); + #else + // Delete an analyzer + await client.DeleteAnalyzerAsync(analyzerId); + Console.WriteLine($"Analyzer '{analyzerId}' deleted successfully."); + #endif + #endregion + + #region Assertion:ContentUnderstandingDeleteAnalyzer + Console.WriteLine($"Attempting to verify deletion of analyzer '{analyzerId}'.. ."); + + // Verify the analyzer was deleted by trying to get it + bool deletionVerified = false; + int? statusCode = null; + string? errorMessage = null; + + try + { + var deletedResponse = await client.GetAnalyzerAsync(analyzerId); + + // If we reach here, the call succeeded which is unexpected + Console.WriteLine($"āš ļø Unexpected: Get analyzer call succeeded after deletion"); + Console.WriteLine($" Response status: {deletedResponse.GetRawResponse().Status}"); + Console.WriteLine($" Analyzer exists: {deletedResponse.HasValue}"); + + if (deletedResponse.HasValue && deletedResponse.Value != null) + { + Console.WriteLine($" Analyzer ID: {deletedResponse.Value.AnalyzerId ?? "(null)"}"); + Console.WriteLine($" Description: {deletedResponse.Value.Description ?? "(null)"}"); + } + + Assert.Fail($"Expected RequestFailedException when getting deleted analyzer '{analyzerId}', but call succeeded with status {deletedResponse.GetRawResponse().Status}"); + } + catch (RequestFailedException ex) + { + // Expected exception - analyzer should not exist + deletionVerified = true; + statusCode = ex.Status; + errorMessage = ex.Message; + + Console.WriteLine($"RequestFailedException caught as expected"); + Console.WriteLine($" Status code: {ex.Status}"); + Console.WriteLine($" Error code: {ex.ErrorCode ?? 
"(none)"}"); + Console.WriteLine($" Message: {ex.Message}"); + + // Verify status code is 404 (Not Found) or 400 (Bad Request) + Assert.IsTrue(ex.Status == 404 || ex.Status == 400, + $"Expected 404 (Not Found) or 400 (Bad Request) status code for deleted analyzer, but got {ex.Status}"); + + // Verify error message contains relevant information + Assert.IsFalse(string.IsNullOrWhiteSpace(ex.Message), + "Error message should not be empty"); + + if (ex.Status == 404) + { + Console.WriteLine("Status 404 (Not Found) confirms analyzer was deleted"); + } + else if (ex.Status == 400) + { + Console.WriteLine("Status 400 (Bad Request) confirms analyzer does not exist"); + } + } + catch (Exception ex) + { + // Unexpected exception type + Console.WriteLine($"āŒ Unexpected exception type: {ex.GetType().Name}"); + Console.WriteLine($" Message: {ex.Message}"); + Console.WriteLine($" Stack trace: {ex.StackTrace}"); + + Assert.Fail($"Expected RequestFailedException when getting deleted analyzer, but got {ex.GetType().Name}: {ex.Message}"); + } + + // Final verification + Assert.IsTrue(deletionVerified, "Deletion should be verified by catching RequestFailedException"); + Assert.IsNotNull(statusCode, "Status code should be captured"); + Assert.IsTrue(statusCode == 404 || statusCode == 400, + $"Status code should be 404 or 400, but was {statusCode}"); + + Console.WriteLine($"\nDeletion verification completed successfully:"); + Console.WriteLine($" Analyzer ID: {analyzerId}"); + Console.WriteLine($" Deletion verified: Yes"); + Console.WriteLine($" Verification method: RequestFailedException with status {statusCode}"); + Console.WriteLine($" Status code: {statusCode} ({(statusCode == 404 ? "Not Found" : "Bad Request")})"); + + // Additional verification: Try to list analyzers and ensure deleted one is not present + Console.WriteLine($"\nAdditional verification: Checking analyzer list.. ."); + var allAnalyzers = new List(); + await foreach (var a in client.GetAnalyzersAsync()) + { + allAnalyzers.Add(a); + } + + var deletedAnalyzerInList = allAnalyzers.Find(a => a.AnalyzerId == analyzerId); + Assert.IsNull(deletedAnalyzerInList, + $"Deleted analyzer '{analyzerId}' should not appear in the list of analyzers"); + + if (deletedAnalyzerInList == null) + { + Console.WriteLine($"Confirmed: Analyzer '{analyzerId}' not found in list of {allAnalyzers.Count} analyzer(s)"); + } + else + { + Console.WriteLine($"āŒ Warning: Deleted analyzer '{analyzerId}' still appears in list"); + Console.WriteLine($" Analyzer status: {(deletedAnalyzerInList.Status != null ? deletedAnalyzerInList.Status.ToString() : "(none)")}"); + Console.WriteLine($" This may indicate eventual consistency delay"); + } + + Console.WriteLine($"All deletion verifications passed for analyzer '{analyzerId}'"); + #endregion + } + } +} \ No newline at end of file diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample10_AnalyzeConfigs.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample10_AnalyzeConfigs.cs new file mode 100644 index 000000000000..493b948a2d4d --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample10_AnalyzeConfigs.cs @@ -0,0 +1,523 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +#nullable enable + +using System; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task AnalyzeConfigsAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingAnalyzeWithConfigs +#if SNIPPET + string filePath = ""; +#else + string filePath = ContentUnderstandingClientTestEnvironment.CreatePath("sample_document_features.pdf"); +#endif + byte[] fileBytes = File.ReadAllBytes(filePath); + BinaryData binaryData = BinaryData.FromBytes(fileBytes); + + // Analyze with prebuilt-documentSearch which has formulas, layout, and OCR enabled + // These configs enable extraction of charts, annotations, hyperlinks, and formulas + AnalyzeResultOperation operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + binaryData); + + AnalyzeResult result = operation.Value; + #endregion + + #region Assertion:ContentUnderstandingAnalyzeWithConfigs + Assert.IsTrue(File.Exists(filePath), $"Sample file not found at {filePath}"); + Assert.IsTrue(fileBytes.Length > 0, "File should not be empty"); + Assert.IsNotNull(binaryData, "Binary data should not be null"); + Console.WriteLine($"File loaded: {filePath} ({fileBytes.Length} bytes)"); + + Assert.IsNotNull(operation, "Analysis operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Analysis operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + Console.WriteLine("Analysis operation properties verified"); + + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents!.Count > 0, "Result should have at least one content"); + Assert.AreEqual(1, result.Contents.Count, "PDF file should have exactly one content element"); + Console.WriteLine($"Analysis result contains {result.Contents.Count} content(s)"); + + // Verify document content type + var firstDocContent = result.Contents?.FirstOrDefault() as DocumentContent; + Assert.IsNotNull(firstDocContent, "Content should be DocumentContent"); + Assert.IsTrue(firstDocContent!.StartPageNumber >= 1, "Start page should be >= 1"); + Assert.IsTrue(firstDocContent.EndPageNumber >= firstDocContent.StartPageNumber, "End page should be >= start page"); + int totalPages = firstDocContent.EndPageNumber - firstDocContent.StartPageNumber + 1; + Console.WriteLine($"Document has {totalPages} page(s) from {firstDocContent.StartPageNumber} to {firstDocContent.EndPageNumber}"); + + Console.WriteLine("Document features analysis with configs completed successfully"); + #endregion + + #region Snippet:ContentUnderstandingExtractCharts + // Extract charts from document content + 
if (result.Contents?.FirstOrDefault() is DocumentContent documentContent) + { + if (documentContent.Figures != null && documentContent.Figures.Count > 0) + { + var chartFigures = documentContent.Figures + .Where(f => f is DocumentChartFigure) + .Cast<DocumentChartFigure>() + .ToList(); + + Console.WriteLine($"Found {chartFigures.Count} chart(s)"); + foreach (var chart in chartFigures) + { + Console.WriteLine($" Chart ID: {chart.Id}"); + if (!string.IsNullOrEmpty(chart.Description)) + { + Console.WriteLine($" Description: {chart.Description}"); + } + if (chart.Caption != null && !string.IsNullOrEmpty(chart.Caption.Content)) + { + Console.WriteLine($" Caption: {chart.Caption.Content}"); + } + } + } + } + #endregion + + #region Assertion:ContentUnderstandingExtractCharts + var docContentCharts = result.Contents?.FirstOrDefault() as DocumentContent; + Assert.IsNotNull(docContentCharts, "Content should be DocumentContent"); + Console.WriteLine("\nChart Extraction Verification:"); + + // Charts are optional - GPT sometimes does not detect them + if (docContentCharts!.Figures != null && docContentCharts.Figures.Count > 0) + { + Console.WriteLine($"Found {docContentCharts.Figures.Count} figure(s)"); + + var chartFiguresAssert = docContentCharts.Figures + .Where(f => f is DocumentChartFigure) + .Cast<DocumentChartFigure>() + .ToList(); + + if (chartFiguresAssert.Count == 0) + { + Console.WriteLine("⚠️ Warning: No charts detected in sample_document_features.pdf"); + Console.WriteLine(" GPT sometimes does not detect charts - this is acceptable"); + } + else + { + Console.WriteLine($"Found {chartFiguresAssert.Count} chart(s)"); + + int chartIndex = 1; + foreach (var chart in chartFiguresAssert) + { + Assert.IsNotNull(chart, $"Chart {chartIndex} should not be null"); + Assert.IsNotNull(chart.Id, $"Chart {chartIndex} ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(chart.Id), + $"Chart {chartIndex} ID should not be empty"); + Console.WriteLine($" Chart {chartIndex}: ID = '{chart.Id}'"); + + // Verify description if present + if (!string.IsNullOrWhiteSpace(chart.Description)) + { + Assert.IsTrue(chart.Description.Length > 0, + $"Chart {chartIndex} description should not be empty when present"); + Console.WriteLine($" Description: {chart.Description.Substring(0, Math.Min(50, chart.Description.Length))}{(chart.Description.Length > 50 ? "..." : "")}"); + } + else + { + Console.WriteLine($" Description: (not available)"); + } + + // Verify caption if present + if (chart.Caption != null) + { + Assert.IsNotNull(chart.Caption, $"Chart {chartIndex} caption object should not be null"); + + if (!string.IsNullOrWhiteSpace(chart.Caption.Content)) + { + Assert.IsTrue(chart.Caption.Content.Length > 0, + $"Chart {chartIndex} caption content should not be empty when present"); + Console.WriteLine($" Caption: {chart.Caption.Content}"); + } + else + { + Console.WriteLine($" Caption: (empty)"); + } + } + else + { + Console.WriteLine($" Caption: (not available)"); + } + + chartIndex++; + } + + Console.WriteLine($"Verified {chartFiguresAssert.Count} chart(s)"); + } + } + else + { + Console.WriteLine("⚠️ Warning: No figures detected in sample_document_features.
pdf"); + Console.WriteLine(" GPT sometimes does not detect charts - this is acceptable"); + } + #endregion + + #region Snippet:ContentUnderstandingExtractHyperlinks + // Extract hyperlinks from document content + if (result.Contents?.FirstOrDefault() is DocumentContent docContent) + { + if (docContent.Hyperlinks != null && docContent.Hyperlinks.Count > 0) + { + Console.WriteLine($"Found {docContent.Hyperlinks.Count} hyperlink(s)"); + foreach (var hyperlink in docContent.Hyperlinks) + { + Console.WriteLine($" URL: {hyperlink.Url ?? "(not available)"}"); + Console.WriteLine($" Content: {hyperlink.Content ?? "(not available)"}"); + } + } + } + #endregion + + #region Assertion:ContentUnderstandingExtractHyperlinks + var docContentHyperlinks = result.Contents?.FirstOrDefault() as DocumentContent; + Assert.IsNotNull(docContentHyperlinks, "Content should be DocumentContent"); + Console.WriteLine("\n🔗 Hyperlink Extraction Verification:"); + + // Hyperlinks should not be empty for sample_document_features.pdf + Assert.IsNotNull(docContentHyperlinks!.Hyperlinks, "Hyperlinks should not be null"); + Assert.IsTrue(docContentHyperlinks.Hyperlinks.Count > 0, + "sample_document_features.pdf should contain hyperlinks"); + Console.WriteLine($"Found {docContentHyperlinks.Hyperlinks.Count} hyperlink(s)"); + + int hyperlinkIndex = 1; + int hyperlinksWithUrl = 0; + int hyperlinksWithContent = 0; + int hyperlinksWithBoth = 0; + + foreach (var hyperlink in docContentHyperlinks.Hyperlinks) + { + Assert.IsNotNull(hyperlink, $"Hyperlink {hyperlinkIndex} should not be null"); + + // At least one of URL or Content should be present + Assert.IsTrue(!string.IsNullOrEmpty(hyperlink.Url) || !string.IsNullOrEmpty(hyperlink.Content), + $"Hyperlink {hyperlinkIndex} should have either URL or Content"); + + bool hasUrl = !string.IsNullOrEmpty(hyperlink.Url); + bool hasContent = !string.IsNullOrEmpty(hyperlink.Content); + + if (hasUrl) hyperlinksWithUrl++; + if (hasContent) hyperlinksWithContent++; + if (hasUrl && hasContent) hyperlinksWithBoth++; + + Console.WriteLine($" Hyperlink {hyperlinkIndex}:"); + + if (hasUrl) + { + Assert.IsTrue(hyperlink.Url!.Length > 0, + $"Hyperlink {hyperlinkIndex} URL should not be empty when present"); + + // Verify URL format (basic validation) + Assert.IsTrue(Uri.IsWellFormedUriString(hyperlink.Url, UriKind.RelativeOrAbsolute), + $"Hyperlink {hyperlinkIndex} URL should be well-formed: {hyperlink.Url}"); + + Console.WriteLine($" URL: {hyperlink.Url}"); + } + else + { + Console.WriteLine($" URL: (not available)"); + } + + if (hasContent) + { + Assert.IsTrue(hyperlink.Content!.Length > 0, + $"Hyperlink {hyperlinkIndex} content should not be empty when present"); + Console.WriteLine($" Content: {hyperlink.Content}"); + } + else + { + Console.WriteLine($" Content: (not available)"); + } + + hyperlinkIndex++; + } + + Console.WriteLine($"\nHyperlink statistics:"); + Console.WriteLine($" Total: {docContentHyperlinks.Hyperlinks.Count}"); + Console.WriteLine($" With URL: {hyperlinksWithUrl} ({(double)hyperlinksWithUrl / docContentHyperlinks.Hyperlinks.Count * 100:F1}%)"); + Console.WriteLine($" With content: {hyperlinksWithContent} ({(double)hyperlinksWithContent / docContentHyperlinks.Hyperlinks.Count * 100:F1}%)"); + Console.WriteLine($" With both: {hyperlinksWithBoth} ({(double)hyperlinksWithBoth / docContentHyperlinks.Hyperlinks.Count * 100:F1}%)"); + Console.WriteLine($"Verified {docContentHyperlinks.Hyperlinks.Count} hyperlink(s)"); + #endregion + + #region
Snippet:ContentUnderstandingExtractFormulas + // Extract formulas from document pages + if (result.Contents?.FirstOrDefault() is DocumentContent content) + { + var allFormulas = new System.Collections.Generic.List<DocumentFormula>(); + if (content.Pages != null) + { + foreach (var page in content.Pages) + { + if (page.Formulas != null) + { + allFormulas.AddRange(page.Formulas); + } + } + } + + if (allFormulas.Count > 0) + { + Console.WriteLine($"Found {allFormulas.Count} formula(s)"); + foreach (var formula in allFormulas) + { + Console.WriteLine($" Formula Kind: {formula.Kind}"); + Console.WriteLine($" LaTeX: {formula.Value ?? "(not available)"}"); + if (formula.Confidence.HasValue) + { + Console.WriteLine($" Confidence: {formula.Confidence.Value:F2}"); + } + } + } + } + #endregion + + #region Assertion:ContentUnderstandingExtractFormulas + var docContentFormulas = result.Contents?.FirstOrDefault() as DocumentContent; + Assert.IsNotNull(docContentFormulas, "Content should be DocumentContent"); + Console.WriteLine("\n🧮 Formula Extraction Verification:"); + + // Formulas should not be empty for sample_document_features.pdf + var allFormulasAssert = new System.Collections.Generic.List<DocumentFormula>(); + Assert.IsNotNull(docContentFormulas!.Pages, "Pages should not be null"); + Assert.IsTrue(docContentFormulas.Pages.Count > 0, "Should have at least one page"); + + int pagesWithFormulas = 0; + foreach (var page in docContentFormulas.Pages) + { + if (page.Formulas != null && page.Formulas.Count > 0) + { + pagesWithFormulas++; + allFormulasAssert.AddRange(page.Formulas); + Console.WriteLine($" Page {page.PageNumber}: {page.Formulas.Count} formula(s)"); + } + } + + Assert.IsTrue(allFormulasAssert.Count > 0, + "sample_document_features.pdf should contain formulas"); + Console.WriteLine($"Found {allFormulasAssert.Count} formula(s) across {pagesWithFormulas} page(s)"); + + int formulaIndex = 1; + var formulaKinds = new System.Collections.Generic.Dictionary<string, int>(); + int formulasWithValue = 0; + int formulasWithConfidence = 0; + + foreach (var formula in allFormulasAssert) + { + Assert.IsNotNull(formula, $"Formula {formulaIndex} should not be null"); + Assert.IsNotNull(formula.Kind, $"Formula {formulaIndex} kind should not be null"); + + // Track formula kinds + if (!formulaKinds.ContainsKey(formula.Kind.ToString())) + formulaKinds[formula.Kind.ToString()] = 0; + formulaKinds[formula.Kind.ToString()]++; + + Console.WriteLine($" Formula {formulaIndex}: Kind = {formula.Kind}"); + + // Value (LaTeX) is optional but should be validated if present + if (!string.IsNullOrWhiteSpace(formula.Value)) + { + formulasWithValue++; + Assert.IsTrue(formula.Value.Length > 0, + $"Formula {formulaIndex} value should not be empty when present"); + Console.WriteLine($" LaTeX: {formula.Value}"); + } + else + { Console.WriteLine($" LaTeX: (not available)"); + } + + // Confidence is optional but should be in valid range if present + if (formula.Confidence.HasValue) + { + formulasWithConfidence++; + Assert.IsTrue(formula.Confidence.Value >= 0 && formula.Confidence.Value <= 1, + $"Formula {formulaIndex} confidence should be between 0 and 1, but was {formula.Confidence.Value}"); + Console.WriteLine($" Confidence: {formula.Confidence.Value:F2}"); + } + else + { + Console.WriteLine($" Confidence: (not available)"); + } + + formulaIndex++; + } + + Console.WriteLine($"\nFormula statistics:"); + Console.WriteLine($" Total formulas: {allFormulasAssert.Count}"); + Console.WriteLine($" Pages with formulas: {pagesWithFormulas}"); + Console.WriteLine($" With LaTeX
value: {formulasWithValue} ({(double)formulasWithValue / allFormulasAssert.Count * 100:F1}%)"); + Console.WriteLine($" With confidence: {formulasWithConfidence} ({(double)formulasWithConfidence / allFormulasAssert.Count * 100:F1}%)"); + Console.WriteLine($" Formula kinds:"); + foreach (var kind in formulaKinds.OrderByDescending(k => k.Value)) + { + Console.WriteLine($" {kind.Key}: {kind.Value} ({(double)kind.Value / allFormulasAssert.Count * 100:F1}%)"); + } + Console.WriteLine($"Verified {allFormulasAssert.Count} formula(s)"); + #endregion + + #region Snippet:ContentUnderstandingExtractAnnotations + // Extract annotations from document content + if (result.Contents?.FirstOrDefault() is DocumentContent document) + { + if (document.Annotations != null && document.Annotations.Count > 0) + { + Console.WriteLine($"Found {document.Annotations.Count} annotation(s)"); + foreach (var annotation in document.Annotations) + { + Console.WriteLine($" Annotation ID: {annotation.Id}"); + Console.WriteLine($" Kind: {annotation.Kind}"); + if (!string.IsNullOrEmpty(annotation.Author)) + { + Console.WriteLine($" Author: {annotation.Author}"); + } + if (annotation.Comments != null && annotation.Comments.Count > 0) + { + Console.WriteLine($" Comments: {annotation.Comments.Count}"); + foreach (var comment in annotation.Comments) + { + Console.WriteLine($" - {comment.Message}"); + } + } + } + } + } + #endregion + + #region Assertion:ContentUnderstandingExtractAnnotations + var docContentAnnotations = result.Contents?.FirstOrDefault() as DocumentContent; + Assert.IsNotNull(docContentAnnotations, "Content should be DocumentContent"); + Console.WriteLine("\nAnnotation Extraction Verification:"); + + // Annotations should not be empty for sample_document_features.pdf + Assert.IsNotNull(docContentAnnotations!.Annotations, "Annotations should not be null"); + Assert.IsTrue(docContentAnnotations.Annotations.Count > 0, + "sample_document_features.pdf should contain annotations"); + Console.WriteLine($"Found {docContentAnnotations.Annotations.Count} annotation(s)"); + + int annotationIndex = 1; + var annotationKinds = new System.Collections.Generic.Dictionary<string, int>(); + int annotationsWithAuthor = 0; + int annotationsWithComments = 0; + int totalComments = 0; + + foreach (var annotation in docContentAnnotations!.Annotations) + { + Assert.IsNotNull(annotation, $"Annotation {annotationIndex} should not be null"); + Assert.IsNotNull(annotation.Id, $"Annotation {annotationIndex} ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(annotation.Id), + $"Annotation {annotationIndex} ID should not be empty"); + Assert.IsNotNull(annotation.Kind, $"Annotation {annotationIndex} kind should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(annotation.Kind.ToString()), + $"Annotation {annotationIndex} kind should not be empty"); + + // Track annotation kinds + if (!annotationKinds.ContainsKey(annotation.Kind.ToString())) + annotationKinds[annotation.Kind.ToString()] = 0; + annotationKinds[annotation.Kind.ToString()]++; + + Console.WriteLine($" Annotation {annotationIndex}:"); + Console.WriteLine($" ID: {annotation.Id}"); + Console.WriteLine($" Kind: {annotation.Kind}"); + + // Verify author if present + if (!string.IsNullOrWhiteSpace(annotation.Author)) + { + annotationsWithAuthor++; + Assert.IsTrue(annotation.Author.Length > 0, + $"Annotation {annotationIndex} author should not be empty when present"); + Console.WriteLine($" Author: {annotation.Author}"); + } + else + { + Console.WriteLine($" Author: (not
available)"); + } + + // Validate comments structure if present + if (annotation.Comments != null && annotation.Comments.Count > 0) + { + annotationsWithComments++; + totalComments += annotation.Comments.Count; + + Assert.IsTrue(annotation.Comments.Count > 0, + $"Annotation {annotationIndex} comments should not be empty when not null"); + Console.WriteLine($" Comments: {annotation.Comments.Count}"); + + int commentIndex = 1; + foreach (var comment in annotation.Comments) + { + Assert.IsNotNull(comment, + $"Annotation {annotationIndex} comment {commentIndex} should not be null"); + Assert.IsNotNull(comment.Message, + $"Annotation {annotationIndex} comment {commentIndex} message should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(comment.Message), + $"Annotation {annotationIndex} comment {commentIndex} message should not be empty"); + + Console.WriteLine($" {commentIndex}. {comment.Message}"); + + // Verify author if present in comment + if (!string.IsNullOrWhiteSpace(comment.Author)) + { + Console.WriteLine($" Author: {comment.Author}"); + } + + commentIndex++; + } + } + else + { + Console.WriteLine($" Comments: (none)"); + } + + annotationIndex++; + } + + Console.WriteLine($"\nAnnotation statistics:"); + Console.WriteLine($" Total annotations: {docContentAnnotations.Annotations.Count}"); + Console.WriteLine($" With author: {annotationsWithAuthor} ({(double)annotationsWithAuthor / docContentAnnotations.Annotations.Count * 100:F1}%)"); + Console.WriteLine($" With comments: {annotationsWithComments} ({(double)annotationsWithComments / docContentAnnotations.Annotations.Count * 100:F1}%)"); + Console.WriteLine($" Total comments: {totalComments}"); + if (annotationsWithComments > 0) + { + Console.WriteLine($" Average comments per annotation: {(double)totalComments / annotationsWithComments:F1}"); + } + Console.WriteLine($" Annotation kinds:"); + foreach (var kind in annotationKinds.OrderByDescending(k => k.Value)) + { + Console.WriteLine($" {kind.Key}: {kind.Value} ({(double)kind.Value / docContentAnnotations.Annotations.Count * 100:F1}%)"); + } + Console.WriteLine($"Verified {docContentAnnotations.Annotations.Count} annotation(s)"); + #endregion + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample11_AnalyzeReturnRawJson.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample11_AnalyzeReturnRawJson.cs new file mode 100644 index 000000000000..6b8c4f64c4ed --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample11_AnalyzeReturnRawJson.cs @@ -0,0 +1,473 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
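+
+// Reviewer summary (editorial comment, not generated code): this sample uses the protocol-method
+// overload of AnalyzeBinaryAsync, which accepts RequestContent and surfaces the service payload as
+// raw BinaryData; the JSON is then parsed with JsonDocument, pretty-printed, and saved under
+// sample_output. As the note in the snippet says, the object-model overload that returns
+// AnalyzeResult is the preferred approach for production code.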
+ +#nullable enable + +using System; +using System.IO; +using System.Linq; +using System.Text.Json; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task AnalyzeReturnRawJsonAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingAnalyzeReturnRawJson +#if SNIPPET + string filePath = ""; +#else + string filePath = ContentUnderstandingClientTestEnvironment.CreatePath("sample_invoice.pdf"); +#endif + byte[] fileBytes = File.ReadAllBytes(filePath); + + // Use protocol method to get raw JSON response + // Note: For production use, prefer the object model approach (AnalyzeBinaryAsync with BinaryData) + // which returns AnalyzeResult objects that are easier to work with + var operation = await client.AnalyzeBinaryAsync( + WaitUntil.Completed, + "prebuilt-documentSearch", + "application/pdf", + RequestContent.Create(BinaryData.FromBytes(fileBytes))); + + BinaryData responseData = operation.Value; + #endregion + + #region Assertion:ContentUnderstandingAnalyzeReturnRawJson + Assert.IsTrue(File.Exists(filePath), $"Sample file not found at {filePath}"); + Assert.IsTrue(fileBytes.Length > 0, "File should not be empty"); + Console.WriteLine($"File loaded: {filePath} ({fileBytes.Length} bytes)"); + + Assert.IsNotNull(operation, "Analysis operation should not be null"); + Assert.IsTrue(operation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(operation.HasValue, "Operation should have a value"); + Assert.IsNotNull(operation.GetRawResponse(), "Analysis operation should have a raw response"); + Assert.IsTrue(operation.GetRawResponse().Status >= 200 && operation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {operation.GetRawResponse().Status}"); + Console.WriteLine($"Analysis operation completed with status: {operation.GetRawResponse().Status}"); + + Assert.IsNotNull(responseData, "Response data should not be null"); + Assert.IsTrue(responseData.ToMemory().Length > 0, "Response data should not be empty"); + Console.WriteLine($"Response data size: {responseData.ToMemory().Length:N0} bytes"); + + // Verify response data can be converted to string + var responseString = responseData.ToString(); + Assert.IsNotNull(responseString, "Response string should not be null"); + Assert.IsTrue(responseString.Length > 0, "Response string should not be empty"); + Console.WriteLine($"Response string length: {responseString.Length:N0} characters"); + + // Verify response is valid JSON format + try + { + using var testDoc = JsonDocument.Parse(responseData); + Assert.IsNotNull(testDoc, "Response should be valid JSON"); + Assert.IsNotNull(testDoc.RootElement, "JSON should have root element"); + Console.WriteLine("Response is valid JSON format"); + } + catch (JsonException ex) + { + Assert.Fail($"Response data is not valid JSON: {ex.Message}"); + } + + Console.WriteLine("Raw JSON analysis operation completed successfully"); + #endregion + + #region Snippet:ContentUnderstandingParseRawJson + // Parse the raw JSON response + using var jsonDocument = 
JsonDocument.Parse(responseData); + + // Pretty-print the JSON + string prettyJson = JsonSerializer.Serialize( + jsonDocument.RootElement, + new JsonSerializerOptions { WriteIndented = true }); + + // Create output directory if it doesn't exist + string outputDir = Path.Combine(AppContext.BaseDirectory, "sample_output"); + Directory.CreateDirectory(outputDir); + + // Save to file + string outputFileName = $"analyze_result_{DateTime.UtcNow:yyyyMMdd_HHmmss}.json"; + string outputPath = Path.Combine(outputDir, outputFileName); + File.WriteAllText(outputPath, prettyJson); + + Console.WriteLine($"Raw JSON response saved to: {outputPath}"); + Console.WriteLine($"File size: {prettyJson.Length:N0} characters"); + #endregion + + #region Assertion:ContentUnderstandingParseRawJson + Assert.IsNotNull(jsonDocument, "JSON document should not be null"); + Assert.IsNotNull(jsonDocument.RootElement, "JSON root element should not be null"); + Console.WriteLine("JSON document parsed successfully"); + + Assert.IsNotNull(prettyJson, "Pretty JSON string should not be null"); + Assert.IsTrue(prettyJson.Length > 0, "Pretty JSON should not be empty"); + Assert.IsTrue(prettyJson.Length >= responseData.ToString().Length, + "Pretty JSON should be same size or larger than original (due to indentation)"); + Console.WriteLine($"Pretty JSON generated: {prettyJson.Length:N0} characters"); + + // Verify JSON is properly indented + Assert.IsTrue(prettyJson.Contains("\n") || prettyJson.Contains("\r"), + "Pretty JSON should contain line breaks"); + Assert.IsTrue(prettyJson.Contains(" ") || prettyJson.Contains("\t"), + "Pretty JSON should contain indentation"); + Console.WriteLine("JSON is properly formatted with indentation"); + + // Verify output directory + Assert.IsNotNull(outputDir, "Output directory path should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(outputDir), "Output directory path should not be empty"); + Assert.IsTrue(Directory.Exists(outputDir), $"Output directory should exist at {outputDir}"); + Console.WriteLine($"Output directory verified: {outputDir}"); + + // Verify output file name format + Assert.IsNotNull(outputFileName, "Output file name should not be null"); + Assert.IsTrue(outputFileName.StartsWith("analyze_result_"), + "Output file name should start with 'analyze_result_'"); + Assert.IsTrue(outputFileName.EndsWith(".json"), + "Output file name should end with '.json'"); + Console.WriteLine($"Output file name: {outputFileName}"); + + // Verify output file path + Assert.IsNotNull(outputPath, "Output file path should not be null"); + Assert.IsTrue(outputPath.Contains(outputDir), + "Output path should contain output directory"); + Assert.IsTrue(outputPath.EndsWith(".json"), + "Output path should end with '.json'"); + Assert.IsTrue(File.Exists(outputPath), $"Output file should exist at {outputPath}"); + Console.WriteLine($"Output file created: {outputPath}"); + + // Verify file content + var fileContent = File.ReadAllText(outputPath); + Assert.IsNotNull(fileContent, "File content should not be null"); + Assert.IsTrue(fileContent.Length > 0, "File content should not be empty"); + Assert.AreEqual(prettyJson, fileContent, "File content should match pretty JSON"); + Assert.AreEqual(prettyJson.Length, fileContent.Length, + "File content length should match pretty JSON length"); + Console.WriteLine($"File content verified: {fileContent.Length:N0} characters"); + + // Verify file can be parsed back to JSON + try + { + var fileContentJson = File.ReadAllText(outputPath); + using var fileDoc = 
JsonDocument.Parse(fileContentJson); + Assert.IsNotNull(fileDoc, "File content should be valid JSON"); + Assert.IsNotNull(fileDoc.RootElement, "File JSON should have root element"); + Console.WriteLine("File content is valid JSON and can be parsed"); + } + catch (JsonException ex) + { + Assert.Fail($"File content is not valid JSON: {ex.Message}"); + } + + // Verify file info + var fileInfo = new FileInfo(outputPath); + Assert.IsTrue(fileInfo.Exists, "File info should indicate file exists"); + Assert.IsTrue(fileInfo.Length > 0, "File size should be > 0"); + Assert.AreEqual(prettyJson.Length, fileInfo.Length, + "File size should match pretty JSON length"); + Console.WriteLine($"File info verified: {fileInfo.Length:N0} bytes"); + + // Get file statistics + var fileStats = new + { + Lines = prettyJson.Split('\n').Length, + Characters = prettyJson.Length, + Bytes = fileInfo.Length, + SizeKB = fileInfo.Length / 1024.0, + CreatedTime = fileInfo.CreationTimeUtc + }; + + Console.WriteLine($"\nJSON file statistics:"); + Console.WriteLine($" Path: {outputPath}"); + Console.WriteLine($" Lines: {fileStats.Lines:N0}"); + Console.WriteLine($" Characters: {fileStats.Characters:N0}"); + Console.WriteLine($" Bytes: {fileStats.Bytes:N0}"); + Console.WriteLine($" Size: {fileStats.SizeKB:F2} KB"); + Console.WriteLine($" Created: {fileStats.CreatedTime:yyyy-MM-dd HH:mm:ss} UTC"); + + Console.WriteLine("Raw JSON parsing and file creation completed successfully"); + #endregion + + #region Snippet:ContentUnderstandingExtractFromRawJson + // Extract key information from raw JSON + var resultElement = jsonDocument.RootElement.GetProperty("result"); + + if (resultElement.TryGetProperty("analyzerId", out var analyzerIdElement)) + { + Console.WriteLine($"Analyzer ID: {analyzerIdElement.GetString()}"); + } + + if (resultElement.TryGetProperty("contents", out var contentsElement) && + contentsElement.ValueKind == JsonValueKind.Array) + { + Console.WriteLine($"Contents count: {contentsElement.GetArrayLength()}"); + + if (contentsElement.GetArrayLength() > 0) + { + var firstContent = contentsElement[0]; + if (firstContent.TryGetProperty("kind", out var kindElement)) + { + Console.WriteLine($"Content kind: {kindElement.GetString()}"); + } + if (firstContent.TryGetProperty("mimeType", out var mimeTypeElement)) + { + Console.WriteLine($"MIME type: {mimeTypeElement.GetString()}"); + } + } + } + #endregion + + #region Assertion:ContentUnderstandingExtractFromRawJson + Console.WriteLine("\nJSON Structure Extraction Verification:"); + + // Verify JSON root structure + Assert.IsNotNull(jsonDocument.RootElement, "JSON root element should not be null"); + Assert.AreEqual(JsonValueKind.Object, jsonDocument.RootElement.ValueKind, + "JSON root should be an object"); + Console.WriteLine("JSON root element is an object"); + + // Verify 'result' property exists + Assert.IsTrue(jsonDocument.RootElement.TryGetProperty("result", out var resultElementVerify), + "JSON should have 'result' property"); + Assert.AreEqual(JsonValueKind.Object, resultElementVerify.ValueKind, + "Result should be an object"); + Console.WriteLine("'result' property found and is an object"); + + // Count and display all root properties + var rootPropertyCount = 0; + var rootPropertyNames = new System.Collections.Generic.List<string>(); + foreach (var property in jsonDocument.RootElement.EnumerateObject()) + { + rootPropertyCount++; + rootPropertyNames.Add(property.Name); + } + Console.WriteLine($"Root level properties: {rootPropertyCount}"); + Console.WriteLine($" Property
names: {string.Join(", ", rootPropertyNames)}"); + + // ========== Verify Analyzer ID ========== + Console.WriteLine("\n📋 Analyzer ID Verification:"); + if (resultElementVerify.TryGetProperty("analyzerId", out var analyzerIdElementVerify)) + { + var analyzerId = analyzerIdElementVerify.GetString(); + Assert.IsNotNull(analyzerId, "Analyzer ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(analyzerId), + "Analyzer ID should not be empty"); + Assert.AreEqual("prebuilt-documentSearch", analyzerId, + "Analyzer ID should match the one used in the request"); + Console.WriteLine($"Analyzer ID verified: '{analyzerId}'"); + } + else + { + Assert.Fail("JSON result should contain 'analyzerId' property"); + } + + // ========== Verify Contents Array ========== + Console.WriteLine("\nContents Array Verification:"); + if (resultElementVerify.TryGetProperty("contents", out var contentsElementVerify)) + { + Assert.AreEqual(JsonValueKind.Array, contentsElementVerify.ValueKind, + "Contents should be an array"); + Console.WriteLine("'contents' property is an array"); + + int contentsCount = contentsElementVerify.GetArrayLength(); + Assert.IsTrue(contentsCount > 0, "Contents array should have at least one element"); + Assert.AreEqual(1, contentsCount, "PDF file should have exactly one content element"); + Console.WriteLine($"Contents count: {contentsCount}"); + + // Verify first content element + var firstContentVerify = contentsElementVerify[0]; + Assert.AreEqual(JsonValueKind.Object, firstContentVerify.ValueKind, + "Content element should be an object"); + Console.WriteLine("First content element is an object"); + + // Count and display content properties + var contentPropertyCount = 0; + var contentPropertyNames = new System.Collections.Generic.List<string>(); + foreach (var property in firstContentVerify.EnumerateObject()) + { + contentPropertyCount++; + contentPropertyNames.Add(property.Name); + } + Console.WriteLine($"Content properties: {contentPropertyCount}"); + Console.WriteLine($" Property names: {string.Join(", ", contentPropertyNames)}"); + + // ========== Verify Kind Property ========== + Console.WriteLine("\nContent Kind Verification:"); + if (firstContentVerify.TryGetProperty("kind", out var kindElementVerify)) + { + var kind = kindElementVerify.GetString(); + Assert.IsNotNull(kind, "Content kind should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(kind), + "Content kind should not be empty"); + + // Verify kind is a valid value (document or media) + if (kind != null) + { + var kindLower = kind.ToLowerInvariant(); + Assert.IsTrue(kindLower == "document" || kindLower == "media", + $"Content kind should be 'document' or 'media', but was '{kind}'"); + } + } + else + { + Assert.Fail("Content element should contain 'kind' property"); + } + + // ========== Verify MIME Type Property ========== + Console.WriteLine("\n📎 MIME Type Verification:"); + if (firstContentVerify.TryGetProperty("mimeType", out var mimeTypeElementVerify)) + { + var mimeType = mimeTypeElementVerify.GetString(); + Assert.IsNotNull(mimeType, "MIME type should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(mimeType), + "MIME type should not be empty"); + if (mimeType != null) + { + Assert.IsTrue(mimeType.IndexOf('/') >= 0, + $"MIME type should be in format 'type/subtype', but was '{mimeType}'"); + Assert.AreEqual("application/pdf", mimeType, + "MIME type should be 'application/pdf' for PDF files"); + } + } + else + { + Assert.Fail("Content element should contain 'mimeType' property"); + } + + //
========== Verify Additional Common Properties ========== + Console.WriteLine("\nAdditional Properties Verification:"); + + // Check for markdown property + if (firstContentVerify.TryGetProperty("markdown", out var markdownElement)) + { + if (markdownElement.ValueKind == JsonValueKind.String) + { + var markdown = markdownElement.GetString(); + Assert.IsNotNull(markdown, "Markdown property should not be null"); + } + } + else + { + Console.WriteLine("No 'markdown' property found"); + } + + // Check for startPageNumber property + if (firstContentVerify.TryGetProperty("startPageNumber", out var startPageElement)) + { + if (startPageElement.ValueKind == JsonValueKind.Number) + { + var startPage = startPageElement.GetInt32(); + Assert.IsTrue(startPage >= 1, $"Start page should be >= 1, but was {startPage}"); + Console.WriteLine($"Start page number: {startPage}"); + } + } + + // Check for endPageNumber property + if (firstContentVerify.TryGetProperty("endPageNumber", out var endPageElement)) + { + if (endPageElement.ValueKind == JsonValueKind.Number) + { + var endPage = endPageElement.GetInt32(); + Assert.IsTrue(endPage >= 1, $"End page should be >= 1, but was {endPage}"); + Console.WriteLine($"End page number: {endPage}"); + + // If both start and end page exist, verify relationship + if (firstContentVerify.TryGetProperty("startPageNumber", out var startPageCheck) && + startPageCheck.ValueKind == JsonValueKind.Number) + { + var startPage = startPageCheck.GetInt32(); + Assert.IsTrue(endPage >= startPage, + $"End page ({endPage}) should be >= start page ({startPage})"); + var totalPages = endPage - startPage + 1; + Console.WriteLine($"Total pages: {totalPages}"); + } + } + } + + // Check for pages array + if (firstContentVerify.TryGetProperty("pages", out var pagesElement)) + { + if (pagesElement.ValueKind == JsonValueKind.Array) + { + var pageCount = pagesElement.GetArrayLength(); + Console.WriteLine($"Pages array found: {pageCount} page(s)"); + } + } + + // Check for tables array + if (firstContentVerify.TryGetProperty("tables", out var tablesElement)) + { + if (tablesElement.ValueKind == JsonValueKind.Array) + { + var tableCount = tablesElement.GetArrayLength(); + Console.WriteLine($"Tables array found: {tableCount} table(s)"); + } + } + } + else + { + Assert.Fail("JSON result should contain 'contents' property"); + } + + // ========== Verify Additional Result Properties ========== + Console.WriteLine("\nAdditional Result Properties:"); + + // Check for warnings + if (resultElementVerify.TryGetProperty("warnings", out var warningsElement)) + { + if (warningsElement.ValueKind == JsonValueKind.Array) + { + var warningCount = warningsElement.GetArrayLength(); + if (warningCount > 0) + { + Console.WriteLine($"āš ļø Warnings found: {warningCount}"); + for (int i = 0; i < Math.Min(warningCount, 5); i++) + { + var warning = warningsElement[i]; + if (warning.TryGetProperty("message", out var messageElement)) + { + Console.WriteLine($" {i + 1}. 
{messageElement.GetString()}"); + } + } + } + else + { + Console.WriteLine("No warnings"); + } + } + } + + // Check for apiVersion + if (resultElementVerify.TryGetProperty("apiVersion", out var apiVersionElement)) + { + if (apiVersionElement.ValueKind == JsonValueKind.String) + { + var apiVersion = apiVersionElement.GetString(); + Console.WriteLine($"API version: {apiVersion}"); + } + } + + // ========== Summary ========== + Console.WriteLine("\nRaw JSON extraction and validation completed successfully:"); + Console.WriteLine($" JSON root properties: {rootPropertyCount}"); + Console.WriteLine($" Analyzer ID: verified"); + Console.WriteLine($" Contents count: verified"); + Console.WriteLine($" Content kind: verified"); + Console.WriteLine($" MIME type: verified"); + Console.WriteLine($" All required properties: present and valid"); + #endregion + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample12_GetResultFile.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample12_GetResultFile.cs new file mode 100644 index 000000000000..2a2386537dec --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample12_GetResultFile.cs @@ -0,0 +1,430 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable enable + +using System; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task GetResultFileAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingAnalyzeVideoForResultFiles +#if SNIPPET + Uri videoUrl = new Uri(""); + // Start the analysis operation + var analyzeOperation = await client.AnalyzeAsync( + WaitUntil.Started, + "prebuilt-videoSearch", + inputs: new[] { new AnalyzeInput { Url = videoUrl } }); + + // Get the operation ID from the operation (available after Started) + string operationId = analyzeOperation.Id; + Console.WriteLine($"Operation ID: {operationId}"); + + // Wait for completion + await analyzeOperation.WaitForCompletionAsync(); +#else + // For testing, use a video URL to get keyframes for GetResultFile testing + Uri videoUrl = new Uri("https://github.com/Azure-Samples/azure-ai-content-understanding-assets/raw/refs/heads/main/videos/sdk_samples/FlightSimulator.mp4"); + // Start the analysis operation + var analyzeOperation = await client.AnalyzeAsync( + WaitUntil.Started, + "prebuilt-videoSearch", + inputs: new[] { new AnalyzeInput { Url = videoUrl } }); + + // Get the operation ID from the operation (available after Started) + string operationId = analyzeOperation.Id; + Console.WriteLine($"Operation ID: {operationId}"); + + // Wait for completion + await analyzeOperation.WaitForCompletionAsync(); +#endif + + AnalyzeResult result = analyzeOperation.Value; + #endregion + + #region Assertion:ContentUnderstandingAnalyzeVideoForResultFiles + Assert.IsNotNull(videoUrl, "Video URL should not be null"); + Assert.IsTrue(videoUrl.IsAbsoluteUri, "Video URL should be 
absolute"); + Console.WriteLine($"Video URL: {videoUrl}"); + + Assert.IsNotNull(analyzeOperation, "Analyze operation should not be null"); + Console.WriteLine("Analysis operation created successfully"); + + // Verify operation ID is available immediately after WaitUntil.Started + Assert.IsNotNull(operationId, "Operation ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(operationId), "Operation ID should not be empty"); + Assert.IsTrue(operationId.Length > 0, "Operation ID should have length > 0"); + Console.WriteLine($"Operation ID obtained: {operationId}"); + + // Verify operation ID format (should be a valid identifier) + Assert.IsFalse(operationId.Contains(" "), "Operation ID should not contain spaces"); + Console.WriteLine($" Length: {operationId.Length} characters"); + + // Verify operation started + Assert.IsTrue(analyzeOperation.HasCompleted || !analyzeOperation.HasCompleted, + "Operation should have a valid completion state"); + Console.WriteLine($"Operation started (ID: {operationId})"); + + // Wait for completion and verify + Assert.IsNotNull(analyzeOperation, "Operation should not be null after waiting"); + Assert.IsTrue(analyzeOperation.HasCompleted, "Operation should be completed after WaitForCompletionAsync"); + Assert.IsTrue(analyzeOperation.HasValue, "Operation should have a value after completion"); + Console.WriteLine("Operation completed successfully"); + + // Verify raw response + var rawResponse = analyzeOperation.GetRawResponse(); + Assert.IsNotNull(rawResponse, "Raw response should not be null"); + Assert.IsTrue(rawResponse.Status >= 200 && rawResponse.Status < 300, + $"Response status should be successful, but was {rawResponse.Status}"); + Console.WriteLine($"Response status: {rawResponse.Status}"); + + // Verify result + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents!.Count > 0, "Result should have at least one content"); + // Video analysis may return multiple content elements (e.g., video and audio tracks) + Assert.IsTrue(result.Contents.Count >= 1, $"Video analysis should return at least one content element, but found {result.Contents.Count}"); + Console.WriteLine($"Analysis result contains {result.Contents.Count} content(s)"); + + // Verify content type + var content = result.Contents.FirstOrDefault(); + Assert.IsNotNull(content, "Content should not be null"); + Console.WriteLine($"Content type: {content!.GetType().Name}"); + + Console.WriteLine($"\nOperation verification completed:"); + Console.WriteLine($" Operation ID: {operationId}"); + Console.WriteLine($" Status: Completed"); + Console.WriteLine($" Contents: {result.Contents.Count}"); + #endregion + + #region Snippet:ContentUnderstandingGetResultFile + // GetResultFile is used to retrieve result files (like keyframe images) from video analysis + // The path format is: "keyframes/{frameTimeMs}" where frameTimeMs is the timestamp in milliseconds + + // Example: Get a keyframe image (if available) + // Note: This example demonstrates the API pattern. In production, you would: + // 1. Analyze a video to get keyframe timestamps + // 2. Use those timestamps to construct paths like "keyframes/1000" for the frame at 1000ms + // 3. 
Call GetResultFileAsync with the operation ID and path + + // For video analysis, keyframes would be found in AudioVisualContent.KeyFrameTimesMs + var videoContent = result.Contents?.FirstOrDefault(c => c is AudioVisualContent) as AudioVisualContent; +#if !SNIPPET + // Test assertions (excluded from snippet) + Assert.IsNotNull(videoContent, "Test requires AudioVisualContent (video content) for GetResultFile"); + Assert.IsNotNull(videoContent!.KeyFrameTimesMs, "KeyFrameTimesMs should not be null"); + Assert.IsTrue(videoContent.KeyFrameTimesMs!.Count > 0, + $"Video content should have at least one keyframe, but found {videoContent.KeyFrameTimesMs.Count}"); +#endif + + if (videoContent?.KeyFrameTimesMs != null && videoContent.KeyFrameTimesMs.Count > 0) + { + // Print keyframe information + int totalKeyframes = videoContent.KeyFrameTimesMs.Count; + long firstFrameTimeMs = videoContent.KeyFrameTimesMs[0]; + Console.WriteLine($"Total keyframes: {totalKeyframes}"); + Console.WriteLine($"First keyframe time: {firstFrameTimeMs} ms"); + + // Get the first keyframe as an example + string framePath = $"keyframes/{firstFrameTimeMs}"; + + Console.WriteLine($"Getting result file: {framePath}"); + + // Get the result file (keyframe image) + Response fileResponse = await client.GetResultFileAsync( + operationId, + framePath); + + byte[] imageBytes = fileResponse.Value.ToArray(); + Console.WriteLine($"Retrieved keyframe image ({imageBytes.Length:N0} bytes)"); + + // Save the keyframe image to sample_output directory + string outputDir = Path.Combine(AppContext.BaseDirectory, "sample_output"); + Directory.CreateDirectory(outputDir); + string outputFileName = $"keyframe_{firstFrameTimeMs}.jpg"; + string outputPath = Path.Combine(outputDir, outputFileName); + File.WriteAllBytes(outputPath, imageBytes); + + Console.WriteLine($"Keyframe image saved to: {outputPath}"); + } + else + { + Console.WriteLine("Note: This sample demonstrates GetResultFile API usage."); + Console.WriteLine(" For video analysis with keyframes, use prebuilt-videoSearch analyzer."); + Console.WriteLine(" Keyframes are available in AudioVisualContent.KeyFrameTimesMs."); + Console.WriteLine(); + Console.WriteLine($"Example usage with operation ID '{operationId}':"); + Console.WriteLine(" Response fileResponse = await client.GetResultFileAsync("); + Console.WriteLine(" operationId, \"keyframes/1000\");"); + } + #endregion + + #region Assertion:ContentUnderstandingGetResultFile + Console.WriteLine("\nšŸŽ¬ Result File Retrieval Verification:"); + + // This test requires video content with keyframes for GetResultFile functionality + // Verify that we have video content + var videoContentVerify = result.Contents?.FirstOrDefault(c => c is AudioVisualContent) as AudioVisualContent; + Assert.IsNotNull(videoContentVerify, "Test requires AudioVisualContent (video content) for GetResultFile testing"); + Assert.IsInstanceOf(videoContentVerify, "Content should be AudioVisualContent type"); + + // Verify that keyframes are available + Assert.IsNotNull(videoContentVerify!.KeyFrameTimesMs, "KeyFrameTimesMs should not be null for video content"); + Assert.IsTrue(videoContentVerify.KeyFrameTimesMs!.Count > 0, + $"Video content should have at least one keyframe, but found {videoContentVerify.KeyFrameTimesMs.Count}"); + + // Verify video content properties + Assert.IsNotNull(videoContentVerify, "Video content should not be null"); + Console.WriteLine("Video content with keyframes detected"); + + if (videoContentVerify.KeyFrameTimesMs != null && 
videoContentVerify.KeyFrameTimesMs.Count > 0) + { + Console.WriteLine("Video content with keyframes detected"); + + // ========== Verify Keyframe Information ========== + Assert.IsNotNull(videoContentVerify.KeyFrameTimesMs, "KeyFrameTimesMs should not be null"); + Assert.IsTrue(videoContentVerify.KeyFrameTimesMs.Count > 0, + "Should have at least one keyframe"); + Console.WriteLine($"Total keyframes: {videoContentVerify.KeyFrameTimesMs.Count}"); + + // Verify keyframe times are valid + var invalidKeyframes = videoContentVerify.KeyFrameTimesMs.Where(t => t < 0).ToList(); + Assert.AreEqual(0, invalidKeyframes.Count, + $"All keyframe times should be non-negative, but found {invalidKeyframes.Count} negative values"); + + // Get keyframe statistics + long firstFrameTimeMs = videoContentVerify.KeyFrameTimesMs[0]; + long lastFrameTimeMs = videoContentVerify.KeyFrameTimesMs[videoContentVerify.KeyFrameTimesMs.Count - 1]; + double avgFrameInterval = videoContentVerify.KeyFrameTimesMs.Count > 1 + ? (double)(lastFrameTimeMs - firstFrameTimeMs) / (videoContentVerify.KeyFrameTimesMs.Count - 1) + : 0; + + Assert.IsTrue(firstFrameTimeMs >= 0, $"First keyframe time should be >= 0, but was {firstFrameTimeMs}"); + Assert.IsTrue(lastFrameTimeMs >= firstFrameTimeMs, + $"Last keyframe time ({lastFrameTimeMs}) should be >= first keyframe time ({firstFrameTimeMs})"); + + Console.WriteLine($" First keyframe: {firstFrameTimeMs} ms ({firstFrameTimeMs / 1000.0:F2} seconds)"); + Console.WriteLine($" Last keyframe: {lastFrameTimeMs} ms ({lastFrameTimeMs / 1000.0:F2} seconds)"); + if (videoContentVerify.KeyFrameTimesMs.Count > 1) + { + Console.WriteLine($" Average interval: {avgFrameInterval:F2} ms"); + } + + // ========== Retrieve First Keyframe ========== + Console.WriteLine("\nšŸ“„ Retrieving first keyframe..."); + string framePath = $"keyframes/{firstFrameTimeMs}"; + Assert.IsFalse(string.IsNullOrWhiteSpace(framePath), "Frame path should not be empty"); + Assert.IsTrue(framePath.StartsWith("keyframes/"), "Frame path should start with 'keyframes/'"); + Console.WriteLine($" Frame path: {framePath}"); + + Response fileResponse = await client.GetResultFileAsync(operationId, framePath); + + // Verify response + Assert.IsNotNull(fileResponse, "File response should not be null"); + Assert.IsTrue(fileResponse.HasValue, "File response should have a value"); + Assert.IsNotNull(fileResponse.Value, "File response value should not be null"); + Console.WriteLine("File response received"); + + // Verify raw response + var fileRawResponse = fileResponse.GetRawResponse(); + Assert.IsNotNull(fileRawResponse, "File raw response should not be null"); + Assert.AreEqual(200, fileRawResponse.Status, + $"File response status should be 200, but was {fileRawResponse.Status}"); + Console.WriteLine($"File response status: {fileRawResponse.Status}"); + + // Verify content type header (should be image type) + if (fileRawResponse.Headers.TryGetValue("Content-Type", out var contentType)) + { + Assert.IsTrue(contentType.StartsWith("image/"), + $"Content type should be an image type, but was '{contentType}'"); + Console.WriteLine($"Content type: {contentType}"); + } + + // ========== Verify Image Data ========== + Console.WriteLine("\nVerifying image data..."); + byte[] imageBytes = fileResponse.Value.ToArray(); + Assert.IsNotNull(imageBytes, "Image bytes should not be null"); + Assert.IsTrue(imageBytes.Length > 0, "Image should have content"); + Assert.IsTrue(imageBytes.Length >= 100, + $"Image should have reasonable size (>= 100 bytes), but was 
{imageBytes.Length} bytes"); + Console.WriteLine($"Image size: {imageBytes.Length:N0} bytes ({imageBytes.Length / 1024.0:F2} KB)"); + + // Verify image format (check magic bytes for common formats) + string imageFormat = "Unknown"; + if (imageBytes.Length >= 2) + { + // Check JPEG magic bytes (FF D8) + if (imageBytes[0] == 0xFF && imageBytes[1] == 0xD8) + imageFormat = "JPEG"; + // Check PNG magic bytes (89 50 4E 47) + else if (imageBytes.Length >= 4 && imageBytes[0] == 0x89 && imageBytes[1] == 0x50 && + imageBytes[2] == 0x4E && imageBytes[3] == 0x47) + imageFormat = "PNG"; + // Check GIF magic bytes (47 49 46) + else if (imageBytes.Length >= 3 && imageBytes[0] == 0x47 && imageBytes[1] == 0x49 && + imageBytes[2] == 0x46) + imageFormat = "GIF"; + // Check WebP magic bytes (52 49 46 46 ... 57 45 42 50) + else if (imageBytes.Length >= 12 && imageBytes[0] == 0x52 && imageBytes[1] == 0x49 && + imageBytes[8] == 0x57 && imageBytes[9] == 0x45 && imageBytes[10] == 0x42 && imageBytes[11] == 0x50) + imageFormat = "WebP"; + } + Console.WriteLine($"Detected image format: {imageFormat}"); + if (imageFormat != "Unknown") + { + Assert.AreNotEqual("Unknown", imageFormat, "Image format should be recognized"); + } + + // ========== Save to File ========== + Console.WriteLine("\nšŸ’¾ Saving keyframe to file..."); + string outputDir = Path.Combine(AppContext.BaseDirectory, "sample_output"); + Assert.IsNotNull(outputDir, "Output directory path should not be null"); + + Directory.CreateDirectory(outputDir); + Assert.IsTrue(Directory.Exists(outputDir), + $"Output directory should exist at {outputDir}"); + Console.WriteLine($"Output directory: {outputDir}"); + + string outputFileName = $"keyframe_{firstFrameTimeMs}.jpg"; + Assert.IsFalse(string.IsNullOrWhiteSpace(outputFileName), "Output file name should not be empty"); + Assert.IsTrue(outputFileName.Contains(firstFrameTimeMs.ToString()), + "Output file name should contain the frame timestamp"); + Console.WriteLine($" File name: {outputFileName}"); + + string outputPath = Path.Combine(outputDir, outputFileName); + Assert.IsFalse(string.IsNullOrWhiteSpace(outputPath), "Output path should not be empty"); + + File.WriteAllBytes(outputPath, imageBytes); + Assert.IsTrue(File.Exists(outputPath), + $"Keyframe image file should exist at {outputPath}"); + Console.WriteLine($"File saved: {outputPath}"); + + // ========== Verify Saved File ========== + Console.WriteLine("\nVerifying saved file..."); + var savedFileInfo = new FileInfo(outputPath); + Assert.IsTrue(savedFileInfo.Exists, "Saved file should exist"); + Assert.IsTrue(savedFileInfo.Length > 0, "Saved file should have content"); + Assert.AreEqual(imageBytes.Length, savedFileInfo.Length, + $"Saved file size ({savedFileInfo.Length}) should match retrieved image size ({imageBytes.Length})"); + Console.WriteLine($"File size verified: {savedFileInfo.Length:N0} bytes"); + + // Verify file can be read back + var readBackBytes = File.ReadAllBytes(outputPath); + Assert.AreEqual(imageBytes.Length, readBackBytes.Length, + "Read back file size should match original"); + Assert.IsTrue(imageBytes.SequenceEqual(readBackBytes), + "Read back file content should match original"); + Console.WriteLine("File content verified (read back matches original)"); + + // ========== Test Additional Keyframes (if available) ========== + if (videoContentVerify.KeyFrameTimesMs.Count > 1) + { + Console.WriteLine($"\nTesting additional keyframes ({videoContentVerify.KeyFrameTimesMs.Count - 1} more available)..."); + + // Test retrieving a middle 
keyframe + int middleIndex = videoContentVerify.KeyFrameTimesMs.Count / 2; + long middleFrameTimeMs = videoContentVerify.KeyFrameTimesMs[middleIndex]; + string middleFramePath = $"keyframes/{middleFrameTimeMs}"; + + var middleFileResponse = await client.GetResultFileAsync(operationId, middleFramePath); + Assert.IsNotNull(middleFileResponse, "Middle keyframe response should not be null"); + Assert.IsTrue(middleFileResponse.Value.ToArray().Length > 0, + "Middle keyframe should have content"); + Console.WriteLine($"Successfully retrieved keyframe at index {middleIndex} ({middleFrameTimeMs} ms)"); + Console.WriteLine($" Size: {middleFileResponse.Value.ToArray().Length:N0} bytes"); + } + + // ========== Summary ========== + Console.WriteLine($"\nKeyframe retrieval verification completed successfully:"); + Console.WriteLine($" Operation ID: {operationId}"); + Console.WriteLine($" Total keyframes: {videoContentVerify.KeyFrameTimesMs.Count}"); + Console.WriteLine($" First keyframe time: {firstFrameTimeMs} ms"); + Console.WriteLine($" Image format: {imageFormat}"); + Console.WriteLine($" Image size: {imageBytes.Length:N0} bytes"); + Console.WriteLine($" Saved to: {outputPath}"); + Console.WriteLine($" File verified: Yes"); + } + else + { + // ========== No Video Content (Expected for Document Analysis) ========== + Console.WriteLine("No video content with keyframes detected"); + Console.WriteLine(" This is expected for document analysis"); + + // Verify content type + var documentContent = result.Contents?.FirstOrDefault() as DocumentContent; + if (documentContent != null) + { + Console.WriteLine($"Content type: DocumentContent (as expected)"); + Console.WriteLine($" MIME type: {documentContent.MimeType ?? "(not specified)"}"); + Console.WriteLine($" Pages: {documentContent.StartPageNumber} - {documentContent.EndPageNumber}"); + } + else + { + var mediaContent = result.Contents?.FirstOrDefault() as MediaContent; + if (mediaContent != null) + { + Console.WriteLine($"Content type: MediaContent"); + } + else + { + Console.WriteLine($"Content type: {result.Contents?.FirstOrDefault()?.GetType().Name ?? "Unknown"}"); + } + } + + // Verify the API pattern is demonstrated + Assert.IsNotNull(operationId, "Operation ID should be available for GetResultFile API"); + Assert.IsFalse(string.IsNullOrWhiteSpace(operationId), "Operation ID should not be empty"); + Console.WriteLine($"Operation ID available for GetResultFile API: {operationId}"); + + // Test error handling for non-existent file path + Console.WriteLine("\n🧪 Testing error handling for invalid path..."); + try + { + var invalidResponse = await client.GetResultFileAsync(operationId, "keyframes/0"); + Console.WriteLine($"āš ļø Request succeeded (status: {invalidResponse.GetRawResponse().Status})"); + Console.WriteLine(" This may indicate the service returns data even for non-existent keyframes"); + } + catch (RequestFailedException ex) + { + Assert.IsTrue(ex.Status == 404 || ex.Status == 400, + $"Expected 404 or 400 for non-existent keyframe, but got {ex.Status}"); + Console.WriteLine($"Correctly returned error status {ex.Status} for non-existent keyframe"); + } + + // ========== API Usage Example ========== + Console.WriteLine($"\nšŸ“š GetResultFile API Usage Example:"); + Console.WriteLine(" For video analysis with keyframes:"); + Console.WriteLine(" 1. Analyze video with prebuilt-videoSearch"); + Console.WriteLine(" 2. Get keyframe times from AudioVisualContent. KeyFrameTimesMs"); + Console.WriteLine(" 3. 
Retrieve keyframes using GetResultFileAsync:"); + Console.WriteLine($" var response = await client.GetResultFileAsync(\"{operationId}\", \"keyframes/1000\");"); + Console.WriteLine(" 4. Save or process the keyframe image"); + + Console.WriteLine($"\nGetResultFile API pattern demonstration completed:"); + Console.WriteLine($" Operation ID: {operationId}"); + Console.WriteLine($" Content type: Document (not video)"); + Console.WriteLine($" API availability: Verified"); + Console.WriteLine($" Error handling: Tested"); + } + #endregion + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample13_DeleteResult.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample13_DeleteResult.cs new file mode 100644 index 000000000000..86abb49c948f --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample13_DeleteResult.cs @@ -0,0 +1,317 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable enable + +using System; +using System.Linq; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task DeleteResultAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + #region Snippet:ContentUnderstandingAnalyzeAndDeleteResult +#if SNIPPET + Uri documentUrl = new Uri(""); +#else + Uri documentUrl = ContentUnderstandingClientTestEnvironment.CreateUri("invoice.pdf"); +#endif + + // Step 1: Start the analysis operation + var analyzeOperation = await client.AnalyzeAsync( + WaitUntil.Started, + "prebuilt-invoice", + inputs: new[] { new AnalyzeInput { Url = documentUrl } }); + // Get the operation ID from the operation (available after Started) + string operationId = analyzeOperation.Id; + Console.WriteLine($"Operation ID: {operationId}"); + + // Wait for completion + await analyzeOperation.WaitForCompletionAsync(); + AnalyzeResult result = analyzeOperation.Value; + Console.WriteLine("Analysis completed successfully!"); + + // Display some sample results + if (result.Contents?.FirstOrDefault() is DocumentContent docContent && docContent.Fields != null) + { + Console.WriteLine($"Total fields extracted: {docContent.Fields.Count}"); + if (docContent.Fields.TryGetValue("CustomerName", out var customerNameField) && customerNameField is StringField sf) + { + Console.WriteLine($"Customer Name: {sf.ValueString ?? 
"(not found)"}"); + } + } + + // Step 2: Delete the analysis result + Console.WriteLine($"Deleting analysis result (Operation ID: {operationId})..."); + await client.DeleteResultAsync(operationId); + Console.WriteLine("Analysis result deleted successfully!"); + #endregion + + #region Assertion:ContentUnderstandingAnalyzeAndDeleteResult + Console.WriteLine("šŸ“‹ Analysis Operation Verification:"); + + // ========== Step 1: Verify Analysis Operation ========== + Assert.IsNotNull(documentUrl, "Document URL should not be null"); + Assert.IsTrue(documentUrl.IsAbsoluteUri, "Document URL should be absolute"); + Console.WriteLine($"Document URL: {documentUrl}"); + + Assert.IsNotNull(analyzeOperation, "Analyze operation should not be null"); + Console.WriteLine("Analysis operation created"); + + // Verify operation ID is available immediately after WaitUntil.Started + Assert.IsNotNull(operationId, "Operation ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(operationId), "Operation ID should not be empty"); + Assert.IsTrue(operationId.Length > 0, "Operation ID should have length > 0"); + Assert.IsFalse(operationId.Contains(" "), "Operation ID should not contain spaces"); + Console.WriteLine($"Operation ID obtained: {operationId}"); + Console.WriteLine($" Length: {operationId.Length} characters"); + + // Verify operation completed + Assert.IsTrue(analyzeOperation.HasCompleted, "Operation should be completed after WaitForCompletionAsync"); + Assert.IsTrue(analyzeOperation.HasValue, "Operation should have a value after completion"); + Console.WriteLine("Operation completed successfully"); + + // Verify raw response + var rawResponse = analyzeOperation.GetRawResponse(); + Assert.IsNotNull(rawResponse, "Raw response should not be null"); + Assert.IsTrue(rawResponse.Status >= 200 && rawResponse.Status < 300, + $"Response status should be successful, but was {rawResponse.Status}"); + Console.WriteLine($"Response status: {rawResponse.Status}"); + + // ========== Verify Analysis Result ========== + Console.WriteLine("\nAnalysis Result Verification:"); + + Assert.IsNotNull(result, "Analysis result should not be null"); + Assert.IsNotNull(result.Contents, "Result should contain contents"); + Assert.IsTrue(result.Contents!.Count > 0, "Result should have at least one content"); + Assert.AreEqual(1, result.Contents.Count, "Invoice should have exactly one content element"); + Console.WriteLine($"Analysis result contains {result.Contents.Count} content(s)"); + + // Verify content structure + var documentContent = result.Contents?.FirstOrDefault() as DocumentContent; + Assert.IsNotNull(documentContent, "Content should be DocumentContent"); + Assert.IsNotNull(documentContent!.Fields, "Document content should have fields"); + Assert.IsTrue(documentContent.Fields.Count >= 0, "Fields collection should be valid"); + Console.WriteLine($"Document content has {documentContent.Fields.Count} field(s)"); + + // Verify common invoice fields if present + var fieldsFound = new System.Collections.Generic.List(); + var commonFields = new[] { "CustomerName", "InvoiceDate", "TotalAmount", "LineItems" }; + foreach (var fieldName in commonFields) + { + if (documentContent.Fields.ContainsKey(fieldName)) + { + fieldsFound.Add(fieldName); + var field = documentContent.Fields[fieldName]; + + if (field is StringField sf && !string.IsNullOrWhiteSpace(sf.ValueString)) + { + Console.WriteLine($" {fieldName}: {sf.ValueString}"); + } + else if (field is ObjectField of) + { + var propertyCount = of.Value is 
System.Collections.IDictionary dict ? dict.Count : 0; + Console.WriteLine($" {fieldName}: [Object with {propertyCount} properties]"); + } + else if (field is ArrayField af) + { + Console.WriteLine($" {fieldName}: [Array with {af.Count} items]"); + } + else + { + Console.WriteLine($" {fieldName}: [Found]"); + } + } + } + + if (fieldsFound.Count > 0) + { + Console.WriteLine($"Found {fieldsFound.Count}/{commonFields.Length} common invoice fields"); + } + + // Verify analyzer ID + if (!string.IsNullOrWhiteSpace(result.AnalyzerId)) + { + Assert.AreEqual("prebuilt-invoice", result.AnalyzerId, + "Analyzer ID should match the one used in the request"); + Console.WriteLine($"Analyzer ID verified: {result.AnalyzerId}"); + } + + Console.WriteLine($"\nAnalysis verification completed:"); + Console.WriteLine($" Operation ID: {operationId}"); + Console.WriteLine($" Status: Completed"); + Console.WriteLine($" Fields extracted: {documentContent.Fields.Count}"); + + // ========== Step 2: Verify Result Deletion ========== + Console.WriteLine("\nResult Deletion Verification:"); + + bool deletionSucceeded = false; + try + { + await client.DeleteResultAsync(operationId); + deletionSucceeded = true; + Console.WriteLine($"DeleteResultAsync succeeded for operation ID: {operationId}"); + } + catch (RequestFailedException ex) + { + Console.WriteLine($"āŒ DeleteResultAsync failed with status {ex.Status}: {ex.Message}"); + Assert.Fail($"First deletion attempt should succeed, but got status {ex.Status}: {ex.Message}"); + } + catch (Exception ex) + { + Console.WriteLine($"āŒ Unexpected exception: {ex.GetType().Name}: {ex.Message}"); + Assert.Fail($"Unexpected exception during deletion: {ex.GetType().Name}: {ex.Message}"); + } + + Assert.IsTrue(deletionSucceeded, "First deletion should succeed"); + + // ========== Verify Result No Longer Accessible ========== + Console.WriteLine("\nVerifying result is deleted.. ."); + + // Try to delete again to verify the result no longer exists + bool secondDeletionFailed = false; + int? secondDeletionStatus = null; + string? secondDeletionError = null; + + try + { + await client.DeleteResultAsync(operationId); + + // If we reach here, the service allows idempotent deletion + Console.WriteLine("Second delete succeeded (service allows idempotent deletion)"); + Console.WriteLine(" Result is either deleted or deletion is idempotent"); + } + catch (RequestFailedException ex) + { + secondDeletionFailed = true; + secondDeletionStatus = ex.Status; + secondDeletionError = ex.Message; + + Console.WriteLine($"Second delete failed as expected"); + Console.WriteLine($" Status code: {ex.Status}"); + Console.WriteLine($" Error code: {ex.ErrorCode ?? 
"(none)"}"); + Console.WriteLine($" Message: {ex.Message}"); + + // Verify status code is 404 (Not Found) or 400 (Bad Request) or 409 (Conflict) + Assert.IsTrue(ex.Status == 404 || ex.Status == 400 || ex.Status == 409, + $"Expected 404 (Not Found), 400 (Bad Request), or 409 (Conflict) for already deleted result, but got {ex.Status}"); + + if (ex.Status == 404) + { + Console.WriteLine("Status 404 (Not Found) confirms result was deleted"); + } + else if (ex.Status == 400) + { + Console.WriteLine("Status 400 (Bad Request) confirms result does not exist"); + } + else if (ex.Status == 409) + { + Console.WriteLine("Status 409 (Conflict) indicates result is already deleted"); + } + } + catch (Exception ex) + { + Console.WriteLine($"āŒ Unexpected exception type: {ex.GetType().Name}"); + Console.WriteLine($" Message: {ex.Message}"); + Assert.Fail($"Expected RequestFailedException for second deletion, but got {ex.GetType().Name}: {ex.Message}"); + } + + // ========== Additional Verification: Try to Access Deleted Result ========== + Console.WriteLine("\nšŸ”Ž Testing access to deleted result..."); + + // Try to get result files (should fail if result is truly deleted) + bool resultFileAccessFailed = false; + int? resultFileStatus = null; + + try + { + // Attempt to access a result file (e.g., trying to get a non-existent keyframe) + // This should fail if the result is deleted + var fileResponse = await client.GetResultFileAsync(operationId, "test_file"); + + Console.WriteLine($"āš ļø GetResultFileAsync succeeded with status {fileResponse.GetRawResponse().Status}"); + Console.WriteLine(" This may indicate the service still has some data, or handles deletion differently"); + } + catch (RequestFailedException ex) + { + resultFileAccessFailed = true; + resultFileStatus = ex.Status; + + Console.WriteLine($"GetResultFileAsync failed as expected"); + Console.WriteLine($" Status code: {ex.Status}"); + + Assert.IsTrue(ex.Status == 404 || ex.Status == 400, + $"Expected 404 or 400 for accessing deleted result files, but got {ex.Status}"); + + if (ex.Status == 404) + { + Console.WriteLine("Status 404 confirms result files are not accessible"); + } + else if (ex.Status == 400) + { + Console.WriteLine("Status 400 confirms operation does not exist"); + } + } + catch (Exception ex) + { + Console.WriteLine($"āš ļø Unexpected exception: {ex.GetType().Name}: {ex.Message}"); + // Don't fail here as this is additional verification + } + + // ========== Deletion Behavior Summary ========== + Console.WriteLine("\nDeletion Behavior Summary:"); + Console.WriteLine($" First deletion: Succeeded"); + + if (secondDeletionFailed) + { + Console.WriteLine($" Second deletion: āŒ Failed with status {secondDeletionStatus}"); + Console.WriteLine($" Behavior: Deletion is NOT idempotent (expected)"); + } + else + { + Console.WriteLine($" Second deletion: Succeeded"); + Console.WriteLine($" Behavior: Deletion IS idempotent"); + } + + if (resultFileAccessFailed) + { + Console.WriteLine($" Result file access: āŒ Failed with status {resultFileStatus}"); + Console.WriteLine($" Confirmation: Result is fully deleted"); + } + else + { + Console.WriteLine($" Result file access: Test skipped or succeeded"); + } + + // ========== Final Verification ========== + Console.WriteLine($"\nDeleteResult verification completed successfully:"); + Console.WriteLine($" Operation ID: {operationId}"); + Console.WriteLine($" Analysis: Completed successfully"); + Console.WriteLine($" Fields extracted: {documentContent.Fields.Count}"); + Console.WriteLine($" 
Deletion: Successful"); + Console.WriteLine($" Verification: Result is deleted or no longer accessible"); + + // Verify all critical assertions passed + Assert.IsTrue(deletionSucceeded, "Deletion should have succeeded"); + Assert.IsTrue(secondDeletionFailed || !secondDeletionFailed, + "Second deletion result is acceptable either way (idempotent or not)"); + + Console.WriteLine("All deletion operations and verifications completed"); + #endregion + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample14_CopyAnalyzer.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample14_CopyAnalyzer.cs new file mode 100644 index 000000000000..e8748f788402 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample14_CopyAnalyzer.cs @@ -0,0 +1,607 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#nullable enable + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task CopyAnalyzerAsync() + { + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var client = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); + + // Generate unique analyzer IDs (deterministic for playback) + string defaultSourceId = $"test_analyzer_source_{Recording.Random.NewGuid().ToString("N")}"; + string defaultTargetId = $"test_analyzer_target_{Recording.Random.NewGuid().ToString("N")}"; + string sourceAnalyzerId = Recording.GetVariable("copySourceAnalyzerId", defaultSourceId) ?? defaultSourceId; + string targetAnalyzerId = Recording.GetVariable("copyTargetAnalyzerId", defaultTargetId) ?? 
defaultTargetId; + + // Step 1: Create the source analyzer + var sourceConfig = new ContentAnalyzerConfig + { + EnableFormula = false, + EnableLayout = true, + EnableOcr = true, + EstimateFieldSourceAndConfidence = true, + ReturnDetails = true + }; + + var sourceFieldSchema = new ContentFieldSchema( + new Dictionary + { + ["company_name"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Extract, + Description = "Name of the company" + }, + ["total_amount"] = new ContentFieldDefinition + { + Type = ContentFieldType.Number, + Method = GenerationMethod.Extract, + Description = "Total amount on the document" + } + }) + { + Name = "company_schema", + Description = "Schema for extracting company information" + }; + + var sourceAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Source analyzer for copying", + Config = sourceConfig, + FieldSchema = sourceFieldSchema + }; + sourceAnalyzer.Models.Add("completion", "gpt-4.1"); + sourceAnalyzer.Tags.Add("modelType", "in_development"); + + var createOperation = await client.CreateAnalyzerAsync( + WaitUntil.Completed, + sourceAnalyzerId, + sourceAnalyzer); + var sourceResult = createOperation.Value; + Console.WriteLine($"Source analyzer '{sourceAnalyzerId}' created successfully!"); + + #region Assertion:ContentUnderstandingCreateSourceAnalyzer + Console.WriteLine("šŸ“‹ Source Analyzer Creation Verification:"); + + // Verify analyzer IDs + Assert.IsNotNull(sourceAnalyzerId, "Source analyzer ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(sourceAnalyzerId), "Source analyzer ID should not be empty"); + Assert.IsNotNull(targetAnalyzerId, "Target analyzer ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(targetAnalyzerId), "Target analyzer ID should not be empty"); + Assert.AreNotEqual(sourceAnalyzerId, targetAnalyzerId, "Source and target IDs should be different"); + Console.WriteLine($"Source analyzer ID: {sourceAnalyzerId}"); + Console.WriteLine($"Target analyzer ID: {targetAnalyzerId}"); + + // Verify source analyzer configuration + Assert.IsNotNull(sourceConfig, "Source config should not be null"); + Assert.AreEqual(false, sourceConfig.EnableFormula, "EnableFormula should be false"); + Assert.AreEqual(true, sourceConfig.EnableLayout, "EnableLayout should be true"); + Assert.AreEqual(true, sourceConfig.EnableOcr, "EnableOcr should be true"); + Assert.AreEqual(true, sourceConfig.EstimateFieldSourceAndConfidence, "EstimateFieldSourceAndConfidence should be true"); + Assert.AreEqual(true, sourceConfig.ReturnDetails, "ReturnDetails should be true"); + Console.WriteLine("Source config verified"); + + // Verify source field schema + Assert.IsNotNull(sourceFieldSchema, "Source field schema should not be null"); + Assert.AreEqual("company_schema", sourceFieldSchema.Name, "Field schema name should match"); + Assert.AreEqual("Schema for extracting company information", sourceFieldSchema.Description, "Field schema description should match"); + Assert.AreEqual(2, sourceFieldSchema.Fields.Count, "Should have 2 fields"); + Console.WriteLine($"Source field schema verified: {sourceFieldSchema.Name}"); + + // Verify individual fields + Assert.IsTrue(sourceFieldSchema.Fields.ContainsKey("company_name"), "Should contain company_name field"); + var companyNameField = sourceFieldSchema.Fields["company_name"]; + Assert.AreEqual(ContentFieldType.String, companyNameField.Type, "company_name should be String type"); + 
Assert.AreEqual(GenerationMethod.Extract, companyNameField.Method, "company_name should use Extract method"); + Console.WriteLine(" company_name field verified"); + + Assert.IsTrue(sourceFieldSchema.Fields.ContainsKey("total_amount"), "Should contain total_amount field"); + var totalAmountField = sourceFieldSchema.Fields["total_amount"]; + Assert.AreEqual(ContentFieldType.Number, totalAmountField.Type, "total_amount should be Number type"); + Assert.AreEqual(GenerationMethod.Extract, totalAmountField.Method, "total_amount should use Extract method"); + Console.WriteLine(" total_amount field verified"); + + // Verify source analyzer object + Assert.IsNotNull(sourceAnalyzer, "Source analyzer object should not be null"); + Assert.AreEqual("prebuilt-document", sourceAnalyzer.BaseAnalyzerId, "Base analyzer ID should match"); + Assert.AreEqual("Source analyzer for copying", sourceAnalyzer.Description, "Description should match"); + Assert.IsTrue(sourceAnalyzer.Models.ContainsKey("completion"), "Should have completion model"); + Assert.AreEqual("gpt-4.1", sourceAnalyzer.Models["completion"], "Completion model should be gpt-4.1"); + Assert.IsTrue(sourceAnalyzer.Tags.ContainsKey("modelType"), "Should have modelType tag"); + Assert.AreEqual("in_development", sourceAnalyzer.Tags["modelType"], "modelType tag should be in_development"); + Console.WriteLine("Source analyzer object verified"); + + // Verify create operation + Assert.IsNotNull(createOperation, "Create source analyzer operation should not be null"); + Assert.IsTrue(createOperation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(createOperation.HasValue, "Operation should have a value"); + Assert.IsNotNull(createOperation.GetRawResponse(), "Create source analyzer operation should have a raw response"); + Assert.IsTrue(createOperation.GetRawResponse().Status >= 200 && createOperation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {createOperation.GetRawResponse().Status}"); + Console.WriteLine($"Create operation status: {createOperation.GetRawResponse().Status}"); + + // Verify source result + Assert.IsNotNull(sourceResult, "Source analyzer result should not be null"); + Assert.AreEqual("prebuilt-document", sourceResult.BaseAnalyzerId, "Base analyzer ID should match"); + Assert.AreEqual("Source analyzer for copying", sourceResult.Description, "Description should match"); + Console.WriteLine($"Source analyzer created: '{sourceAnalyzerId}'"); + + // Verify config in result + Assert.IsNotNull(sourceResult.Config, "Config should not be null"); + Assert.AreEqual(false, sourceResult.Config.EnableFormula, "EnableFormula should be false"); + Assert.AreEqual(true, sourceResult.Config.EnableLayout, "EnableLayout should be true"); + Assert.AreEqual(true, sourceResult.Config.EnableOcr, "EnableOcr should be true"); + Assert.AreEqual(true, sourceResult.Config.EstimateFieldSourceAndConfidence, "EstimateFieldSourceAndConfidence should be true"); + Assert.AreEqual(true, sourceResult.Config.ReturnDetails, "ReturnDetails should be true"); + Console.WriteLine("Config preserved in result"); + // Verify field schema in result + Assert.IsNotNull(sourceResult.FieldSchema, "Field schema should not be null"); + Assert.AreEqual("company_schema", sourceResult.FieldSchema.Name, "Field schema name should match"); + Assert.AreEqual(2, sourceResult.FieldSchema.Fields.Count, "Should have 2 fields"); + Assert.IsTrue(sourceResult.FieldSchema.Fields.ContainsKey("company_name"), "Should contain company_name field"); + 
Assert.IsTrue(sourceResult.FieldSchema.Fields.ContainsKey("total_amount"), "Should contain total_amount field"); + Console.WriteLine($"Field schema preserved in result: {sourceResult.FieldSchema.Fields.Count} fields"); + + // Verify tags in result + Assert.IsNotNull(sourceResult.Tags, "Tags should not be null"); + Assert.IsTrue(sourceResult.Tags.ContainsKey("modelType"), "Should contain modelType tag"); + Assert.AreEqual("in_development", sourceResult.Tags["modelType"], "modelType tag should match"); + Console.WriteLine($"Tags preserved in result: {sourceResult.Tags.Count} tag(s)"); + + // Verify models in result + Assert.IsNotNull(sourceResult.Models, "Models should not be null"); + Assert.IsTrue(sourceResult.Models.ContainsKey("completion"), "Should have completion model"); + Assert.AreEqual("gpt-4.1", sourceResult.Models["completion"], "Completion model should match"); + Console.WriteLine($"Models preserved in result: {sourceResult.Models.Count} model(s)"); + + Console.WriteLine($"\nSource analyzer creation completed:"); + Console.WriteLine($" ID: {sourceAnalyzerId}"); + Console.WriteLine($" Base: {sourceResult.BaseAnalyzerId}"); + Console.WriteLine($" Fields: {sourceResult.FieldSchema.Fields.Count}"); + Console.WriteLine($" Tags: {sourceResult.Tags.Count}"); + Console.WriteLine($" Models: {sourceResult.Models.Count}"); + #endregion + + // Get the source analyzer to see its description and tags before copying + var sourceResponse = await client.GetAnalyzerAsync(sourceAnalyzerId); + ContentAnalyzer sourceAnalyzerInfo = sourceResponse.Value; + Console.WriteLine($"Source analyzer description: {sourceAnalyzerInfo.Description}"); + Console.WriteLine($"Source analyzer tags: {string.Join(", ", sourceAnalyzerInfo.Tags.Select(kvp => $"{kvp.Key}={kvp.Value}"))}"); + + #region Assertion:ContentUnderstandingGetSourceAnalyzer + Console.WriteLine("\nSource Analyzer Retrieval Verification:"); + + Assert.IsNotNull(sourceResponse, "Source analyzer response should not be null"); + Assert.IsTrue(sourceResponse.HasValue, "Source analyzer response should have a value"); + Assert.IsNotNull(sourceAnalyzerInfo, "Source analyzer info should not be null"); + Console.WriteLine("Source analyzer retrieved successfully"); + + // Verify raw response + var sourceRawResponse = sourceResponse.GetRawResponse(); + Assert.IsNotNull(sourceRawResponse, "Raw response should not be null"); + Assert.AreEqual(200, sourceRawResponse.Status, $"Response status should be 200, but was {sourceRawResponse.Status}"); + Console.WriteLine($"Response status: {sourceRawResponse.Status}"); + + // Verify basic properties + Assert.AreEqual("Source analyzer for copying", sourceAnalyzerInfo.Description, + "Source description should match"); + Assert.AreEqual("prebuilt-document", sourceAnalyzerInfo.BaseAnalyzerId, + "Base analyzer ID should match"); + Console.WriteLine($"Description: '{sourceAnalyzerInfo.Description}'"); + Console.WriteLine($"Base analyzer: {sourceAnalyzerInfo.BaseAnalyzerId}"); + + // Verify tags + Assert.IsNotNull(sourceAnalyzerInfo.Tags, "Tags should not be null"); + Assert.IsTrue(sourceAnalyzerInfo.Tags.ContainsKey("modelType"), + "Source should contain modelType tag"); + Assert.AreEqual("in_development", sourceAnalyzerInfo.Tags["modelType"], + "Source modelType tag should be 'in_development'"); + Console.WriteLine($"Tags verified: modelType={sourceAnalyzerInfo.Tags["modelType"]}"); + + // Verify field schema + Assert.IsNotNull(sourceAnalyzerInfo.FieldSchema, "Field schema should not be null"); + 
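+            // Note: the checks below assume GetAnalyzerAsync returns the analyzer definition as stored by the
+            // service, so the schema name and the two fields defined in Step 1 ("company_name", "total_amount")
+            // are expected to round-trip unchanged; this is a verification sketch, not a guarantee of service behavior.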
Assert.AreEqual("company_schema", sourceAnalyzerInfo.FieldSchema.Name, "Field schema name should match"); + Assert.AreEqual(2, sourceAnalyzerInfo.FieldSchema.Fields.Count, "Should have 2 fields"); + Console.WriteLine($"Field schema: {sourceAnalyzerInfo.FieldSchema.Name} ({sourceAnalyzerInfo.FieldSchema.Fields.Count} fields)"); + + // Verify config + Assert.IsNotNull(sourceAnalyzerInfo.Config, "Config should not be null"); + Console.WriteLine("Config present"); + + // Verify models + Assert.IsNotNull(sourceAnalyzerInfo.Models, "Models should not be null"); + Assert.IsTrue(sourceAnalyzerInfo.Models.ContainsKey("completion"), "Should have completion model"); + Console.WriteLine($"Models: {sourceAnalyzerInfo.Models.Count} model(s)"); + + Console.WriteLine($"Source analyzer retrieval verification completed"); + #endregion + + try + { + // Step 2: Copy the source analyzer to target + // Note: This copies within the same resource. For cross-resource copying, use GrantCopyAuth sample. + #region Snippet:ContentUnderstandingCopyAnalyzer +#if SNIPPET + await client.CopyAnalyzerAsync( + WaitUntil.Completed, + targetAnalyzerId, + sourceAnalyzerId); +#else + await client.CopyAnalyzerAsync( + WaitUntil.Completed, + targetAnalyzerId, + sourceAnalyzerId); +#endif + #endregion + + #region Assertion:ContentUnderstandingCopyAnalyzer + Console.WriteLine("\nšŸ“‹ Analyzer Copy Verification:"); + + // Verify the target analyzer was created by copying + var copiedResponse = await client.GetAnalyzerAsync(targetAnalyzerId); + Assert.IsNotNull(copiedResponse, "Copied analyzer response should not be null"); + Assert.IsTrue(copiedResponse.HasValue, "Copied analyzer response should have a value"); + Console.WriteLine($"Target analyzer '{targetAnalyzerId}' retrieved successfully"); + + // Verify raw response + var copiedRawResponse = copiedResponse.GetRawResponse(); + Assert.IsNotNull(copiedRawResponse, "Raw response should not be null"); + Assert.AreEqual(200, copiedRawResponse.Status, $"Response status should be 200, but was {copiedRawResponse.Status}"); + Console.WriteLine($"Response status: {copiedRawResponse.Status}"); + + ContentAnalyzer copiedAnalyzer = copiedResponse.Value; + Assert.IsNotNull(copiedAnalyzer, "Copied analyzer should not be null"); + + // ========== Verify Base Properties ========== + Console.WriteLine("\nVerifying copied properties.. 
."); + + Assert.IsNotNull(sourceAnalyzerInfo.BaseAnalyzerId, "Source base analyzer ID should not be null"); + Assert.IsNotNull(copiedAnalyzer.BaseAnalyzerId, "Copied base analyzer ID should not be null"); + Assert.AreEqual(sourceAnalyzerInfo.BaseAnalyzerId, copiedAnalyzer.BaseAnalyzerId, + $"Copied analyzer should have same base analyzer ID, but got '{copiedAnalyzer.BaseAnalyzerId}' instead of '{sourceAnalyzerInfo.BaseAnalyzerId}'"); + Console.WriteLine($"Base analyzer ID: {copiedAnalyzer.BaseAnalyzerId}"); + + Assert.IsNotNull(sourceAnalyzerInfo.Description, "Source description should not be null"); + Assert.IsNotNull(copiedAnalyzer.Description, "Copied description should not be null"); + Assert.AreEqual(sourceAnalyzerInfo.Description, copiedAnalyzer.Description, + $"Copied analyzer should have same description, but got '{copiedAnalyzer.Description}' instead of '{sourceAnalyzerInfo.Description}'"); + Console.WriteLine($"Description: '{copiedAnalyzer.Description}'"); + + // ========== Verify Field Schema ========== + Console.WriteLine("\nVerifying field schema..."); + + Assert.IsNotNull(copiedAnalyzer.FieldSchema, "Copied analyzer should have field schema"); + Assert.IsNotNull(sourceAnalyzerInfo.FieldSchema, "Source analyzer should have field schema"); + Assert.AreEqual(sourceAnalyzerInfo.FieldSchema.Name, copiedAnalyzer.FieldSchema.Name, + "Field schema name should match"); + Assert.AreEqual(sourceAnalyzerInfo.FieldSchema.Fields.Count, copiedAnalyzer.FieldSchema.Fields.Count, + $"Copied analyzer should have same number of fields ({sourceAnalyzerInfo.FieldSchema.Fields.Count}), but got {copiedAnalyzer.FieldSchema.Fields.Count}"); + Console.WriteLine($"Field schema: {copiedAnalyzer.FieldSchema.Name} ({copiedAnalyzer.FieldSchema.Fields.Count} fields)"); + + // Verify individual fields + Assert.IsTrue(copiedAnalyzer.FieldSchema.Fields.ContainsKey("company_name"), + "Copied analyzer should contain company_name field"); + var copiedCompanyNameField = copiedAnalyzer.FieldSchema.Fields["company_name"]; + var sourceCompanyNameField = sourceAnalyzerInfo.FieldSchema.Fields["company_name"]; + Assert.AreEqual(sourceCompanyNameField.Type, copiedCompanyNameField.Type, + "company_name field type should match"); + Assert.AreEqual(sourceCompanyNameField.Method, copiedCompanyNameField.Method, + "company_name field method should match"); + Assert.AreEqual(sourceCompanyNameField.Description, copiedCompanyNameField.Description, + "company_name field description should match"); + Console.WriteLine(" company_name field copied correctly"); + + Assert.IsTrue(copiedAnalyzer.FieldSchema.Fields.ContainsKey("total_amount"), + "Copied analyzer should contain total_amount field"); + var copiedTotalAmountField = copiedAnalyzer.FieldSchema.Fields["total_amount"]; + var sourceTotalAmountField = sourceAnalyzerInfo.FieldSchema.Fields["total_amount"]; + Assert.AreEqual(sourceTotalAmountField.Type, copiedTotalAmountField.Type, + "total_amount field type should match"); + Assert.AreEqual(sourceTotalAmountField.Method, copiedTotalAmountField.Method, + "total_amount field method should match"); + Assert.AreEqual(sourceTotalAmountField.Description, copiedTotalAmountField.Description, + "total_amount field description should match"); + Console.WriteLine(" total_amount field copied correctly"); + + // ========== Verify Tags ========== + Console.WriteLine("\nVerifying tags.. 
."); + + Assert.IsNotNull(copiedAnalyzer.Tags, "Copied analyzer should have tags"); + Assert.IsTrue(copiedAnalyzer.Tags.ContainsKey("modelType"), + "Copied analyzer should contain modelType tag"); + Assert.AreEqual("in_development", copiedAnalyzer.Tags["modelType"], + $"Copied analyzer should have same tag value 'in_development', but got '{copiedAnalyzer.Tags["modelType"]}'"); + Console.WriteLine($"Tags copied: modelType={copiedAnalyzer.Tags["modelType"]}"); + + // Verify tag counts match + Assert.AreEqual(sourceAnalyzerInfo.Tags.Count, copiedAnalyzer.Tags.Count, + $"Copied analyzer should have same number of tags ({sourceAnalyzerInfo.Tags.Count}), but got {copiedAnalyzer.Tags.Count}"); + Console.WriteLine($"Tag count matches: {copiedAnalyzer.Tags.Count}"); + + // ========== Verify Config ========== + Console.WriteLine("\nVerifying config..."); + + Assert.IsNotNull(copiedAnalyzer.Config, "Copied analyzer should have config"); + Assert.IsNotNull(sourceAnalyzerInfo.Config, "Source analyzer should have config"); + + if (sourceAnalyzerInfo.Config.EnableFormula.HasValue && copiedAnalyzer.Config.EnableFormula.HasValue) + { + Assert.AreEqual(sourceAnalyzerInfo.Config.EnableFormula.Value, copiedAnalyzer.Config.EnableFormula.Value, + "EnableFormula should match"); + Console.WriteLine($" EnableFormula: {copiedAnalyzer.Config.EnableFormula.Value}"); + } + + if (sourceAnalyzerInfo.Config.EnableLayout.HasValue && copiedAnalyzer.Config.EnableLayout.HasValue) + { + Assert.AreEqual(sourceAnalyzerInfo.Config.EnableLayout.Value, copiedAnalyzer.Config.EnableLayout.Value, + "EnableLayout should match"); + Console.WriteLine($" EnableLayout: {copiedAnalyzer.Config.EnableLayout.Value}"); + } + + if (sourceAnalyzerInfo.Config.EnableOcr.HasValue && copiedAnalyzer.Config.EnableOcr.HasValue) + { + Assert.AreEqual(sourceAnalyzerInfo.Config.EnableOcr.Value, copiedAnalyzer.Config.EnableOcr.Value, + "EnableOcr should match"); + Console.WriteLine($" EnableOcr: {copiedAnalyzer.Config.EnableOcr.Value}"); + } + + Console.WriteLine("Config copied correctly"); + + // ========== Verify Models ========== + Console.WriteLine("\nšŸ¤– Verifying models..."); + + Assert.IsNotNull(copiedAnalyzer.Models, "Copied analyzer should have models"); + Assert.IsNotNull(sourceAnalyzerInfo.Models, "Source analyzer should have models"); + Assert.AreEqual(sourceAnalyzerInfo.Models.Count, copiedAnalyzer.Models.Count, + $"Copied analyzer should have same number of models ({sourceAnalyzerInfo.Models.Count}), but got {copiedAnalyzer.Models.Count}"); + + if (sourceAnalyzerInfo.Models.ContainsKey("completion") && copiedAnalyzer.Models.ContainsKey("completion")) + { + Assert.AreEqual(sourceAnalyzerInfo.Models["completion"], copiedAnalyzer.Models["completion"], + "Completion model should match"); + Console.WriteLine($"Models copied: completion={copiedAnalyzer.Models["completion"]}"); + } + + // ========== Summary ========== + Console.WriteLine($"\nAnalyzer copy verification completed successfully:"); + Console.WriteLine($" Source: {sourceAnalyzerId}"); + Console.WriteLine($" Target: {targetAnalyzerId}"); + Console.WriteLine($" Base analyzer: {copiedAnalyzer.BaseAnalyzerId}"); + Console.WriteLine($" Description: {copiedAnalyzer.Description}"); + Console.WriteLine($" Fields: {copiedAnalyzer.FieldSchema.Fields.Count}"); + Console.WriteLine($" Tags: {copiedAnalyzer.Tags.Count}"); + Console.WriteLine($" Models: {copiedAnalyzer.Models.Count}"); + Console.WriteLine($" All properties verified:"); + #endregion + + // Step 3: Update the target analyzer with a 
production tag + // Step 4: Get the target analyzer again to verify the update + #region Snippet:ContentUnderstandingUpdateAndVerifyAnalyzer +#if SNIPPET + // Get the target analyzer first to get its BaseAnalyzerId + var targetResponse = await client.GetAnalyzerAsync(targetAnalyzerId); + ContentAnalyzer targetAnalyzer = targetResponse.Value; + + // Update the target analyzer with a production tag + var updatedAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = targetAnalyzer.BaseAnalyzerId + }; + updatedAnalyzer.Tags["modelType"] = "model_in_production"; + + await client.UpdateAnalyzerAsync(targetAnalyzerId, updatedAnalyzer); + + // Get the target analyzer again to verify the update + var updatedResponse = await client.GetAnalyzerAsync(targetAnalyzerId); + ContentAnalyzer updatedTargetAnalyzer = updatedResponse.Value; + Console.WriteLine($"Updated target analyzer description: {updatedTargetAnalyzer.Description}"); + Console.WriteLine($"Updated target analyzer tag: {updatedTargetAnalyzer.Tags["modelType"]}"); +#else + // Get the target analyzer first to get its BaseAnalyzerId + var targetResponse = await client.GetAnalyzerAsync(targetAnalyzerId); + ContentAnalyzer targetAnalyzer = targetResponse.Value; + + // Update the target analyzer with a production tag + var updatedAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = targetAnalyzer.BaseAnalyzerId + }; + updatedAnalyzer.Tags["modelType"] = "model_in_production"; + + await client.UpdateAnalyzerAsync(targetAnalyzerId, updatedAnalyzer); + + // Get the target analyzer again to verify the update + var updatedResponse = await client.GetAnalyzerAsync(targetAnalyzerId); + ContentAnalyzer updatedTargetAnalyzer = updatedResponse.Value; + Console.WriteLine($"Updated target analyzer description: {updatedTargetAnalyzer.Description}"); + Console.WriteLine($"Updated target analyzer tag: {updatedTargetAnalyzer.Tags["modelType"]}"); +#endif + #endregion + + #region Assertion:ContentUnderstandingUpdateAndVerifyAnalyzer + Console.WriteLine("\nšŸ”„ Analyzer Update Verification:"); + + // ========== Verify Target Retrieval Before Update ========== + Assert.IsNotNull(targetResponse, "Target analyzer response should not be null"); + Assert.IsTrue(targetResponse.HasValue, "Target analyzer response should have a value"); + Assert.IsNotNull(targetAnalyzer, "Target analyzer should not be null"); + Console.WriteLine($"Target analyzer retrieved before update"); + + // Verify raw response + var targetRawResponse = targetResponse.GetRawResponse(); + Assert.IsNotNull(targetRawResponse, "Raw response should not be null"); + Assert.AreEqual(200, targetRawResponse.Status, $"Response status should be 200, but was {targetRawResponse.Status}"); + + // ========== Verify Update Object ========== + Assert.IsNotNull(updatedAnalyzer, "Updated analyzer object should not be null"); + Assert.AreEqual(targetAnalyzer.BaseAnalyzerId, updatedAnalyzer.BaseAnalyzerId, + "Updated analyzer should preserve base analyzer ID"); + Assert.IsTrue(updatedAnalyzer.Tags.ContainsKey("modelType"), "Updated analyzer should have modelType tag"); + Assert.AreEqual("model_in_production", updatedAnalyzer.Tags["modelType"], + "Updated analyzer should have new tag value"); + Console.WriteLine("Update object created with new tag value"); + + // ========== Verify Updated Retrieval ========== + Assert.IsNotNull(updatedResponse, "Updated analyzer response should not be null"); + Assert.IsTrue(updatedResponse.HasValue, "Updated analyzer response should have a value"); + Assert.IsNotNull(updatedTargetAnalyzer, 
"Updated target analyzer should not be null"); + Console.WriteLine($"Updated analyzer retrieved successfully"); + + // Verify raw response + var updatedRawResponse = updatedResponse.GetRawResponse(); + Assert.IsNotNull(updatedRawResponse, "Raw response should not be null"); + Assert.AreEqual(200, updatedRawResponse.Status, $"Response status should be 200, but was {updatedRawResponse.Status}"); + Console.WriteLine($"Response status: {updatedRawResponse.Status}"); + + // ========== Verify Description Preserved ========== + Console.WriteLine("\nVerifying preserved properties..."); + + Assert.IsNotNull(updatedTargetAnalyzer.Description, "Description should not be null"); + Assert.AreEqual("Source analyzer for copying", updatedTargetAnalyzer.Description, + $"Description should be preserved from source, but got '{updatedTargetAnalyzer.Description}'"); + Console.WriteLine($"Description preserved: '{updatedTargetAnalyzer.Description}'"); + + // ========== Verify Tag Updated ========== + Console.WriteLine("\nVerifying tag update..."); + + Assert.IsNotNull(updatedTargetAnalyzer.Tags, "Tags should not be null"); + Assert.IsTrue(updatedTargetAnalyzer.Tags.ContainsKey("modelType"), + "Updated analyzer should contain modelType tag"); + Assert.AreEqual("model_in_production", updatedTargetAnalyzer.Tags["modelType"], + $"Tag should be updated to 'model_in_production', but got '{updatedTargetAnalyzer.Tags["modelType"]}'"); + Assert.AreNotEqual("in_development", updatedTargetAnalyzer.Tags["modelType"], + "Tag should no longer be 'in_development'"); + Console.WriteLine($"Tag updated: in_development → model_in_production"); + + // ========== Verify Field Schema Preserved ========== + Console.WriteLine("\nVerifying field schema preservation..."); + + Assert.IsNotNull(updatedTargetAnalyzer.FieldSchema, + "Field schema should still exist after update"); + Assert.AreEqual("company_schema", updatedTargetAnalyzer.FieldSchema.Name, + "Field schema name should be preserved"); + Assert.AreEqual(2, updatedTargetAnalyzer.FieldSchema.Fields.Count, + $"Should still have 2 fields after update, but got {updatedTargetAnalyzer.FieldSchema.Fields.Count}"); + Assert.IsTrue(updatedTargetAnalyzer.FieldSchema.Fields.ContainsKey("company_name"), + "company_name field should still exist"); + Assert.IsTrue(updatedTargetAnalyzer.FieldSchema.Fields.ContainsKey("total_amount"), + "total_amount field should still exist"); + Console.WriteLine($"Field schema preserved: {updatedTargetAnalyzer.FieldSchema.Fields.Count} fields"); + // ========== Verify Base Analyzer ID Preserved ========== + Console.WriteLine("\nšŸ”— Verifying base analyzer preservation..."); + + Assert.IsNotNull(updatedTargetAnalyzer.BaseAnalyzerId, "Base analyzer ID should not be null"); + Assert.AreEqual(sourceAnalyzerInfo.BaseAnalyzerId, updatedTargetAnalyzer.BaseAnalyzerId, + $"Base analyzer ID should be preserved, but got '{updatedTargetAnalyzer.BaseAnalyzerId}' instead of '{sourceAnalyzerInfo.BaseAnalyzerId}'"); + Assert.AreEqual("prebuilt-document", updatedTargetAnalyzer.BaseAnalyzerId, + "Base analyzer ID should still be 'prebuilt-document'"); + Console.WriteLine($"Base analyzer preserved: {updatedTargetAnalyzer.BaseAnalyzerId}"); + + // ========== Verify Config Preserved ========== + Console.WriteLine("\nVerifying config preservation..."); + + Assert.IsNotNull(updatedTargetAnalyzer.Config, "Config should still exist after update"); + Console.WriteLine("Config preserved"); + + // ========== Verify Models Preserved ========== + Console.WriteLine("\nšŸ¤– Verifying 
models preservation..."); + + Assert.IsNotNull(updatedTargetAnalyzer.Models, "Models should still exist after update"); + if (updatedTargetAnalyzer.Models.ContainsKey("completion")) + { + Assert.AreEqual("gpt-4.1", updatedTargetAnalyzer.Models["completion"], + "Completion model should be preserved"); + Console.WriteLine($"Models preserved: completion={updatedTargetAnalyzer.Models["completion"]}"); + } + + // ========== Compare Before and After ========== + Console.WriteLine("\nUpdate comparison:"); + Console.WriteLine($" Property | Before | After"); + Console.WriteLine($" ----------------- | ----------------- | -----------------"); + Console.WriteLine($" Description | (preserved) | {updatedTargetAnalyzer.Description}"); + Console.WriteLine($" Tag modelType | in_development | model_in_production"); + Console.WriteLine($" Fields | (preserved) | {updatedTargetAnalyzer.FieldSchema.Fields.Count}"); + Console.WriteLine($" Base analyzer | (preserved) | {updatedTargetAnalyzer.BaseAnalyzerId}"); + Console.WriteLine($" Config | (preserved) | Yes"); + Console.WriteLine($" Models | (preserved) | {updatedTargetAnalyzer.Models.Count}"); + + // ========== Summary ========== + Console.WriteLine($"\nAnalyzer update verification completed successfully:"); + Console.WriteLine($" Analyzer ID: {targetAnalyzerId}"); + Console.WriteLine($" Description: Preserved"); + Console.WriteLine($" Tag updated: in_development → model_in_production"); + Console.WriteLine($" Field schema: Preserved ({updatedTargetAnalyzer.FieldSchema.Fields.Count} fields)"); + Console.WriteLine($" Base analyzer: Preserved ({updatedTargetAnalyzer.BaseAnalyzerId})"); + Console.WriteLine($" Config: Preserved"); + Console.WriteLine($" Models: Preserved ({updatedTargetAnalyzer.Models.Count})"); + #endregion + } + finally + { + // Clean up: delete both analyzers + #region Snippet:ContentUnderstandingDeleteCopiedAnalyzers +#if SNIPPET + try + { + await client.DeleteAnalyzerAsync(sourceAnalyzerId); + Console.WriteLine($"Source analyzer '{sourceAnalyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors + } + + try + { + await client.DeleteAnalyzerAsync(targetAnalyzerId); + Console.WriteLine($"Target analyzer '{targetAnalyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors + } +#else + try + { + await client.DeleteAnalyzerAsync(sourceAnalyzerId); + Console.WriteLine($"Source analyzer '{sourceAnalyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors + } + + try + { + await client.DeleteAnalyzerAsync(targetAnalyzerId); + Console.WriteLine($"Target analyzer '{targetAnalyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors + } +#endif + #endregion + } + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample15_GrantCopyAuth.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample15_GrantCopyAuth.cs new file mode 100644 index 000000000000..f942a9623a8f --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/Sample15_GrantCopyAuth.cs @@ -0,0 +1,341 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +#nullable enable + +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public async Task GrantCopyAuthAsync() + { +#if !SNIPPET + string endpoint = TestEnvironment.Endpoint; + var options = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + var sourceClient = InstrumentClient(new ContentUnderstandingClient(new Uri(endpoint), TestEnvironment.Credential, options)); +#endif + + #region Snippet:ContentUnderstandingGrantCopyAuth +#if SNIPPET + // Get source endpoint from configuration + // Note: configuration is already loaded in Main method + string sourceEndpoint = "https://source-resource.services.ai.azure.com/"; + string? sourceKey = "optional-source-api-key"; // Set to null to use DefaultAzureCredential + + // Create source client + ContentUnderstandingClient sourceClient = !string.IsNullOrEmpty(sourceKey) + ? new ContentUnderstandingClient(new Uri(sourceEndpoint), new AzureKeyCredential(sourceKey)) + : new ContentUnderstandingClient(new Uri(sourceEndpoint), new DefaultAzureCredential()); + + // Source analyzer ID (must already exist in the source resource) + string sourceAnalyzerId = "my_source_analyzer_id_in_the_source_resource"; + // Target analyzer ID (will be created during copy) + string targetAnalyzerId = "my_target_analyzer_id_in_the_target_resource"; + + // Get source and target resource information from configuration + string sourceResourceId = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{name}"; + string sourceRegion = "eastus"; // Replace with actual source region + string targetEndpoint = "https://target-resource.services.ai.azure.com/"; + string targetResourceId = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{name}"; + string targetRegion = "westus"; // Replace with actual target region + string? targetKey = "optional-target-api-key"; // Set to null to use DefaultAzureCredential + + // Create target client + ContentUnderstandingClient targetClient = !string.IsNullOrEmpty(targetKey) + ? new ContentUnderstandingClient(new Uri(targetEndpoint), new AzureKeyCredential(targetKey)) + : new ContentUnderstandingClient(new Uri(targetEndpoint), new DefaultAzureCredential()); +#else + // For testing, we'll use the same endpoint for both source and target + // In production, these would be different resources + string defaultSourceId = $"test_analyzer_source_{Recording.Random.NewGuid().ToString("N")}"; + string defaultTargetId = $"test_analyzer_target_{Recording.Random.NewGuid().ToString("N")}"; + string sourceAnalyzerId = Recording.GetVariable("grantCopySourceAnalyzerId", defaultSourceId) ?? defaultSourceId; + string targetAnalyzerId = Recording.GetVariable("grantCopyTargetAnalyzerId", defaultTargetId) ?? defaultTargetId; + + // Get source and target resource information from test environment + // Note: For testing, we use the same endpoint for both source and target + // In production, these would be different resources + string sourceResourceId = TestEnvironment.SourceResourceId ?? 
throw new InvalidOperationException("SOURCE_RESOURCE_ID is required"); + string sourceRegion = TestEnvironment.SourceRegion ?? throw new InvalidOperationException("SOURCE_REGION is required"); + string targetEndpoint = TestEnvironment.TargetEndpoint; + string targetResourceId = TestEnvironment.TargetResourceId ?? throw new InvalidOperationException("TARGET_RESOURCE_ID is required"); + string targetRegion = TestEnvironment.TargetRegion ?? throw new InvalidOperationException("TARGET_REGION is required"); + string? targetKey = TestEnvironment.TargetKey; + + // Create target client + var targetClientOptions = InstrumentClientOptions(new ContentUnderstandingClientOptions()); + ContentUnderstandingClient targetClient = !string.IsNullOrEmpty(targetKey) + ? InstrumentClient(new ContentUnderstandingClient(new Uri(targetEndpoint), new AzureKeyCredential(targetKey!), targetClientOptions)) + : InstrumentClient(new ContentUnderstandingClient(new Uri(targetEndpoint), TestEnvironment.Credential, targetClientOptions)); +#endif + + // Step 1: Create the source analyzer + var sourceConfig = new ContentAnalyzerConfig + { + EnableFormula = false, + EnableLayout = true, + EnableOcr = true, + EstimateFieldSourceAndConfidence = true, + ReturnDetails = true + }; + + var sourceFieldSchema = new ContentFieldSchema( + new Dictionary + { + ["company_name"] = new ContentFieldDefinition + { + Type = ContentFieldType.String, + Method = GenerationMethod.Extract, + Description = "Name of the company" + }, + ["total_amount"] = new ContentFieldDefinition + { + Type = ContentFieldType.Number, + Method = GenerationMethod.Extract, + Description = "Total amount on the document" + } + }) + { + Name = "company_schema", + Description = "Schema for extracting company information" + }; + + var sourceAnalyzer = new ContentAnalyzer + { + BaseAnalyzerId = "prebuilt-document", + Description = "Source analyzer for cross-resource copying", + Config = sourceConfig, + FieldSchema = sourceFieldSchema + }; + sourceAnalyzer.Models.Add("completion", "gpt-4.1"); + + var createOperation = await sourceClient.CreateAnalyzerAsync( + WaitUntil.Completed, + sourceAnalyzerId, + sourceAnalyzer); + var sourceResult = createOperation.Value; + Console.WriteLine($"Source analyzer '{sourceAnalyzerId}' created successfully!"); + + try + { + // Step 2: Grant copy authorization + var copyAuth = await sourceClient.GrantCopyAuthorizationAsync( + sourceAnalyzerId, + targetResourceId, + targetRegion); + + Console.WriteLine("Copy authorization granted successfully!"); + Console.WriteLine($" Target Azure Resource ID: {copyAuth.Value.TargetAzureResourceId}"); + Console.WriteLine($" Target Region: {targetRegion}"); + Console.WriteLine($" Expires at: {copyAuth.Value.ExpiresAt}"); + + // Step 3: Copy analyzer to target resource + var copyOperation = await targetClient.CopyAnalyzerAsync( + WaitUntil.Completed, + targetAnalyzerId, + sourceAnalyzerId, + sourceResourceId, + sourceRegion); + + var targetResult = copyOperation.Value; + Console.WriteLine($"Target analyzer '{targetAnalyzerId}' copied successfully to target resource!"); + Console.WriteLine($"Target analyzer description: {targetResult.Description}"); + +#if !SNIPPET + #region Assertion:ContentUnderstandingGrantCopyAuthorization + Console.WriteLine("\nšŸ” Copy Authorization Grant Verification:"); + + // Verify copyAuth response + Assert.IsNotNull(copyAuth, "Copy authorization response should not be null"); + Assert.IsTrue(copyAuth.HasValue, "Copy authorization should have a value"); + 
Assert.IsNotNull(copyAuth.Value, "Copy authorization value should not be null"); + Console.WriteLine("Copy authorization response received"); + + // Verify raw response + var copyAuthRawResponse = copyAuth.GetRawResponse(); + Assert.IsNotNull(copyAuthRawResponse, "Raw response should not be null"); + Assert.IsTrue(copyAuthRawResponse.Status >= 200 && copyAuthRawResponse.Status < 300, + $"Response status should be successful, but was {copyAuthRawResponse.Status}"); + Console.WriteLine($"Response status: {copyAuthRawResponse.Status}"); + + // Verify target resource ID + Assert.IsNotNull(copyAuth.Value.TargetAzureResourceId, "Target Azure resource ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(copyAuth.Value.TargetAzureResourceId), + "Target Azure resource ID should not be empty"); + Assert.AreEqual(targetResourceId, copyAuth.Value.TargetAzureResourceId, + $"Target resource ID should match, but got '{copyAuth.Value.TargetAzureResourceId}' instead of '{targetResourceId}'"); + Console.WriteLine($"Target Azure Resource ID verified: {copyAuth.Value.TargetAzureResourceId}"); + // Note: TargetRegion is not available in the CopyAuthorization response + // The target region is tracked separately in the targetRegion variable + Console.WriteLine($"Target region (tracked): {targetRegion}"); + + // Verify expiration time + var expiresAt = copyAuth.Value.ExpiresAt; + // Only verify expiration time in live/record mode, not in playback mode + // (recorded expiration times may be in the past during playback) + if (Mode != RecordedTestMode.Playback) + { + var now = DateTimeOffset.UtcNow; + + Assert.IsTrue(expiresAt > now, + $"Expiration time should be in the future, but expires at {expiresAt} (now: {now})"); + + // Calculate time until expiration + var timeUntilExpiration = expiresAt - now; + Assert.IsTrue(timeUntilExpiration.TotalMinutes > 0, + "Should have positive time until expiration"); + + Console.WriteLine($"Expiration time verified: {expiresAt:yyyy-MM-dd HH:mm:ss} UTC"); + Console.WriteLine($" Time until expiration: {timeUntilExpiration.TotalMinutes:F2} minutes"); + + // Verify expiration is reasonable (typically several hours) + if (timeUntilExpiration.TotalHours < 24) + { + Console.WriteLine($" ⚠️ Note: Authorization expires in less than 24 hours"); + } + } + else + { + Console.WriteLine($"Expiration time: {expiresAt:yyyy-MM-dd HH:mm:ss} UTC (from recorded response)"); + } + + // Summary + Console.WriteLine($"\nCopy authorization granted successfully:"); + Console.WriteLine($" Source analyzer: {sourceAnalyzerId}"); + Console.WriteLine($" Target resource: {copyAuth.Value.TargetAzureResourceId}"); + Console.WriteLine($" Target region: {targetRegion}"); + Console.WriteLine($" Expires: {copyAuth.Value.ExpiresAt:yyyy-MM-dd HH:mm:ss} UTC"); + Console.WriteLine($" Authorization ready for cross-resource copy"); + #endregion +#endif + } + finally + { + // Clean up: delete both analyzers + try + { + await sourceClient.DeleteAnalyzerAsync(sourceAnalyzerId); + Console.WriteLine($"Source analyzer '{sourceAnalyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors + } + + try + { + await targetClient.DeleteAnalyzerAsync(targetAnalyzerId); + Console.WriteLine($"Target analyzer '{targetAnalyzerId}' deleted successfully."); + } + catch + { + // Ignore cleanup errors + } + } + #endregion + +#if !SNIPPET + #region Assertion:ContentUnderstandingCreateSourceAnalyzerForCopy + Console.WriteLine("📋 Source Analyzer Creation Verification (For Cross-Resource Copy):"); + + //
Verify analyzer IDs + Assert.IsNotNull(sourceAnalyzerId, "Source analyzer ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(sourceAnalyzerId), "Source analyzer ID should not be empty"); + Assert.IsNotNull(targetAnalyzerId, "Target analyzer ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(targetAnalyzerId), "Target analyzer ID should not be empty"); + Assert.AreNotEqual(sourceAnalyzerId, targetAnalyzerId, "Source and target IDs should be different"); + Console.WriteLine($"Source analyzer ID: {sourceAnalyzerId}"); + Console.WriteLine($"Target analyzer ID: {targetAnalyzerId}"); + + // Verify resource information + Assert.IsNotNull(sourceResourceId, "Source resource ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(sourceResourceId), "Source resource ID should not be empty"); + Assert.IsNotNull(sourceRegion, "Source region should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(sourceRegion), "Source region should not be empty"); + Assert.IsNotNull(targetResourceId, "Target resource ID should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(targetResourceId), "Target resource ID should not be empty"); + Assert.IsNotNull(targetRegion, "Target region should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(targetRegion), "Target region should not be empty"); + Assert.IsNotNull(targetEndpoint, "Target endpoint should not be null"); + Assert.IsFalse(string.IsNullOrWhiteSpace(targetEndpoint), "Target endpoint should not be empty"); + + Console.WriteLine($"Source resource: {sourceResourceId}"); + Console.WriteLine($"Source region: {sourceRegion}"); + Console.WriteLine($"Target resource: {targetResourceId}"); + Console.WriteLine($"Target region: {targetRegion}"); + Console.WriteLine($"Target endpoint: {targetEndpoint}"); + + // Verify clients + Assert.IsNotNull(sourceClient, "Source client should not be null"); + Assert.IsNotNull(targetClient, "Target client should not be null"); + Console.WriteLine("Source and target clients created"); + + // Verify source analyzer configuration + Assert.IsNotNull(sourceConfig, "Source config should not be null"); + Assert.AreEqual(false, sourceConfig.EnableFormula, "EnableFormula should be false"); + Assert.AreEqual(true, sourceConfig.EnableLayout, "EnableLayout should be true"); + Assert.AreEqual(true, sourceConfig.EnableOcr, "EnableOcr should be true"); + Assert.AreEqual(true, sourceConfig.EstimateFieldSourceAndConfidence, "EstimateFieldSourceAndConfidence should be true"); + Assert.AreEqual(true, sourceConfig.ReturnDetails, "ReturnDetails should be true"); + Console.WriteLine("Source config verified"); + + // Verify source field schema + Assert.IsNotNull(sourceFieldSchema, "Source field schema should not be null"); + Assert.AreEqual("company_schema", sourceFieldSchema.Name, "Field schema name should match"); + Assert.AreEqual("Schema for extracting company information", sourceFieldSchema.Description, "Field schema description should match"); + Assert.AreEqual(2, sourceFieldSchema.Fields.Count, "Should have 2 fields"); + Assert.IsTrue(sourceFieldSchema.Fields.ContainsKey("company_name"), "Should contain company_name field"); + Assert.IsTrue(sourceFieldSchema.Fields.ContainsKey("total_amount"), "Should contain total_amount field"); + Console.WriteLine($"Source field schema verified: {sourceFieldSchema.Name} ({sourceFieldSchema.Fields.Count} fields)"); + + // Verify source analyzer object + Assert.IsNotNull(sourceAnalyzer, "Source analyzer object should not be null"); + 
Assert.AreEqual("prebuilt-document", sourceAnalyzer.BaseAnalyzerId, "Base analyzer ID should match"); + Assert.AreEqual("Source analyzer for cross-resource copying", sourceAnalyzer.Description, "Description should match"); + Assert.IsTrue(sourceAnalyzer.Models.ContainsKey("completion"), "Should have completion model"); + Assert.AreEqual("gpt-4.1", sourceAnalyzer.Models["completion"], "Completion model should be gpt-4.1"); + Console.WriteLine("Source analyzer object verified"); + + // Verify create operation + Assert.IsNotNull(createOperation, "Create operation should not be null"); + Assert.IsTrue(createOperation.HasCompleted, "Operation should be completed"); + Assert.IsTrue(createOperation.HasValue, "Operation should have a value"); + Assert.IsNotNull(createOperation.GetRawResponse(), "Create operation should have a raw response"); + Assert.IsTrue(createOperation.GetRawResponse().Status >= 200 && createOperation.GetRawResponse().Status < 300, + $"Response status should be successful, but was {createOperation.GetRawResponse().Status}"); + Console.WriteLine($"Create operation status: {createOperation.GetRawResponse().Status}"); + + // Verify source result + Assert.IsNotNull(sourceResult, "Source analyzer result should not be null"); + Assert.AreEqual("prebuilt-document", sourceResult.BaseAnalyzerId, "Base analyzer ID should match"); + Assert.AreEqual("Source analyzer for cross-resource copying", sourceResult.Description, "Description should match"); + Assert.IsNotNull(sourceResult.Config, "Config should not be null"); + Assert.IsNotNull(sourceResult.FieldSchema, "Field schema should not be null"); + Assert.AreEqual(2, sourceResult.FieldSchema.Fields.Count, "Should have 2 fields"); + Assert.IsNotNull(sourceResult.Models, "Models should not be null"); + Assert.IsTrue(sourceResult.Models.ContainsKey("completion"), "Should have completion model"); + Console.WriteLine($"Source analyzer created: '{sourceAnalyzerId}'"); + + Console.WriteLine($"\nSource analyzer creation completed:"); + Console.WriteLine($" ID: {sourceAnalyzerId}"); + Console.WriteLine($" Base: {sourceResult.BaseAnalyzerId}"); + Console.WriteLine($" Fields: {sourceResult.FieldSchema.Fields.Count}"); + Console.WriteLine($" Models: {sourceResult.Models.Count}"); + Console.WriteLine($" Ready for cross-resource copy"); + #endregion +#endif + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleFiles/mixed_financial_docs.pdf b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleFiles/mixed_financial_docs.pdf new file mode 100644 index 000000000000..2c6d57818e11 Binary files /dev/null and b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleFiles/mixed_financial_docs.pdf differ diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleFiles/sample_document_features.pdf b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleFiles/sample_document_features.pdf new file mode 100755 index 000000000000..9f47030c0377 Binary files /dev/null and b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleFiles/sample_document_features.pdf differ diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleFiles/sample_invoice.pdf b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleFiles/sample_invoice.pdf new file mode 100644 index 000000000000..812bcd9b30f3 Binary files /dev/null and 
b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleFiles/sample_invoice.pdf differ diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleSnippets.cs b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleSnippets.cs new file mode 100644 index 000000000000..c8883bfefcd8 --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tests/samples/SampleSnippets.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using Azure; +using Azure.AI.ContentUnderstanding; +using Azure.AI.ContentUnderstanding.Tests; +using Azure.Core; +using Azure.Core.TestFramework; +using Azure.Identity; + +namespace Azure.AI.ContentUnderstanding.Samples +{ + public partial class ContentUnderstandingSamples + { + [RecordedTest] + public void CreateContentUnderstandingClient() + { + #region Snippet:CreateContentUnderstandingClient +#if SNIPPET + string endpoint = "<endpoint>"; + var credential = new DefaultAzureCredential(); +#else + string endpoint = TestEnvironment.Endpoint; + var credential = TestEnvironment.Credential; +#endif + var client = new ContentUnderstandingClient(new Uri(endpoint), credential); + #endregion + } + + // Method kept for snippet extraction, but not run as a test + public void CreateContentUnderstandingClientApiKey() + { + #region Snippet:CreateContentUnderstandingClientApiKey +#if SNIPPET + string endpoint = "<endpoint>"; + string apiKey = "<api-key>"; +#else + string endpoint = TestEnvironment.Endpoint; + string apiKey = TestEnvironment.ApiKey; +#endif + var client = new ContentUnderstandingClient(new Uri(endpoint), new AzureKeyCredential(apiKey)); + #endregion + } + } +} diff --git a/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tsp-location.yaml b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tsp-location.yaml new file mode 100644 index 000000000000..78919e835b1e --- /dev/null +++ b/sdk/contentunderstanding/Azure.AI.ContentUnderstanding/tsp-location.yaml @@ -0,0 +1,6 @@ +directory: specification/ai/ContentUnderstanding +commit: e14eec8796b4d481a942a41e103881589ec648d8 +repo: Azure/azure-rest-api-specs +additionalDirectories: + +emitterPackageJsonPath: eng/azure-typespec-http-client-csharp-emitter-package.json diff --git a/sdk/contentunderstanding/ci.yml b/sdk/contentunderstanding/ci.yml new file mode 100644 index 000000000000..77e39cb656c5 --- /dev/null +++ b/sdk/contentunderstanding/ci.yml @@ -0,0 +1,31 @@ +# NOTE: Please refer to https://aka.ms/azsdk/engsys/ci-yaml before editing this file.
+ +trigger: + branches: + include: + - main + - hotfix/* + - release/* + paths: + include: + - sdk/contentunderstanding/ + +pr: + branches: + include: + - main + - feature/* + - hotfix/* + - release/* + paths: + include: + - sdk/contentunderstanding/ + +extends: + template: /eng/pipelines/templates/stages/archetype-sdk-client.yml + parameters: + ServiceDirectory: contentunderstanding + ArtifactName: packages + Artifacts: + - name: Azure.AI.ContentUnderstanding + safeName: AzureAIContentUnderstanding diff --git a/sdk/contentunderstanding/test-resources.json b/sdk/contentunderstanding/test-resources.json new file mode 100644 index 000000000000..55bfea823487 --- /dev/null +++ b/sdk/contentunderstanding/test-resources.json @@ -0,0 +1,85 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "baseName": { + "type": "string", + "defaultValue": "[resourceGroup().name]", + "metadata": { + "description": "The base resource name." + } + }, + "tenantId": { + "type": "string", + "defaultValue": "72f988bf-86f1-41af-91ab-2d7cd011db47", + "metadata": { + "description": "The tenant ID to which the application and resources belong." + } + }, + "testApplicationOid": { + "type": "string", + "defaultValue": "b3653439-8136-4cd5-aac3-2a9460871ca6", + "metadata": { + "description": "The client OID to grant access to test resources." + } + }, + "location": { + "type": "string", + "defaultValue": "[resourceGroup().location]", + "metadata": { + "description": "The location of the resource. By default, this is the same as the resource group." + } + } + }, + "variables": { + "secretValue": "Very secret value" + }, + "resources": [ + { + "type": "Microsoft.KeyVault/vaults", + "apiVersion": "2016-10-01", + "name": "[parameters('baseName')]", + "location": "[parameters('location')]", + "properties": { + "sku": { + "family": "A", + "name": "premium" + }, + "tenantId": "[parameters('tenantId')]", + "accessPolicies": [ + { + "tenantId": "[parameters('tenantId')]", + "objectId": "[parameters('testApplicationOid')]", + "permissions": { + "secrets": [ + "get" + ] + } + } + ] + } + }, + { + "type": "Microsoft.KeyVault/vaults/secrets", + "name": "[concat(parameters('baseName'), '/TestSecret')]", + "apiVersion": "2016-10-01", + "location": "[parameters('location')]", + "dependsOn": [ + "[resourceId('Microsoft.KeyVault/vaults', parameters('baseName'))]" + ], + "properties": { + "value": "[variables('secretValue')]" + } + } + ], + "outputs": { + "KEYVAULT_URL": { + "type": "string", + "value": "[reference(parameters('baseName')).vaultUri]" + }, + "KEYVAULT_SECRET": { + "type": "string", + "value": "[variables('secretValue')]" + } + } +} diff --git a/sdk/contentunderstanding/tests.yml b/sdk/contentunderstanding/tests.yml new file mode 100644 index 000000000000..e1290e99818a --- /dev/null +++ b/sdk/contentunderstanding/tests.yml @@ -0,0 +1,7 @@ +trigger: none + +extends: + template: /eng/pipelines/templates/stages/archetype-sdk-tests.yml + parameters: + ServiceDirectory: contentunderstanding + SupportedClouds: 'Public'