From 5d65097b429f28a3ea2e067c8dee5b5c2c4214aa Mon Sep 17 00:00:00 2001
From: speakeasybot
Date: Fri, 12 Dec 2025 21:56:30 +0000
Subject: [PATCH 1/4] ## Python SDK Changes:

* `unstructured_client.workflows.get_workflow()`: `response.reprocess_all` **Changed** **Breaking** :warning:
* `unstructured_client.sources.create_source()`:
  * `request.create_source_connector.config` **Changed** **Breaking** :warning:
  * `response.config.[snowflake_source_connector_config].schema` **Changed**
* `unstructured_client.general.partition()`:
  * `request.partition_parameters` **Changed** **Breaking** :warning:
* `unstructured_client.destinations.create_destination()`:
  * `request.create_destination_connector.config` **Changed** **Breaking** :warning:
  * `response.config` **Changed**
* `unstructured_client.workflows.update_workflow()`:
  * `request.update_workflow.template_id` **Added**
  * `response.reprocess_all` **Changed** **Breaking** :warning:
* `unstructured_client.workflows.list_workflows()`: `response.[].reprocess_all` **Changed** **Breaking** :warning:
* `unstructured_client.destinations.update_destination()`:
  * `request.update_destination_connector.config` **Changed** **Breaking** :warning:
  * `response.config` **Changed**
* `unstructured_client.workflows.create_workflow()`:
  * `request.create_workflow.template_id` **Added**
  * `response.reprocess_all` **Changed** **Breaking** :warning:
* `unstructured_client.sources.update_source()`:
  * `request.update_source_connector.config` **Changed** **Breaking** :warning:
  * `response.config.[snowflake_source_connector_config].schema` **Changed**
* `unstructured_client.destinations.list_destinations()`:
  * `request.destination_type` **Changed**
  * `response.[].config` **Changed**
* `unstructured_client.sources.list_sources()`: `response.[].config.[snowflake_source_connector_config].schema` **Changed**
* `unstructured_client.sources.get_source()`: `response.config.[snowflake_source_connector_config].schema` **Changed**
* `unstructured_client.templates.get_template()`: **Added**
* `unstructured_client.jobs.list_jobs()`: `response.[].output_node_files.[]` **Changed**
* `unstructured_client.jobs.download_job_output()`:
  * `request.node_id` **Changed**
* `unstructured_client.jobs.create_job()`: **Added**
* `unstructured_client.jobs.get_job()`: `response.output_node_files.[]` **Changed**
* `unstructured_client.workflows.run_workflow()`: `response.output_node_files.[]` **Changed**
* `unstructured_client.destinations.get_destination()`: `response.config` **Changed**
* `unstructured_client.templates.list_templates()`: **Added**
---
 .gitignore | 1 + .speakeasy/gen.lock | 874 +++++++++++++++++- .speakeasy/workflow.lock | 14 +- README.md | 8 +- RELEASES.md | 11 + codeSamples.yaml | 26 +- docs/models/operations/createjobrequest.md | 9 + docs/models/operations/createjobresponse.md | 11 + .../operations/downloadjoboutputrequest.md | 12 +- docs/models/operations/gettemplaterequest.md | 9 + docs/models/operations/gettemplateresponse.md | 11 + .../models/operations/listtemplatesrequest.md | 8 + .../operations/listtemplatesresponse.md | 11 + docs/models/shared/astradbconnectorconfig.md | 1 + .../shared/astradbconnectorconfiginput.md | 1 + docs/models/shared/bodycreatejob.md | 9 + docs/models/shared/bodyrunworkflow.md | 6 +- .../shared/bodyrunworkflowinputfiles.md | 10 + docs/models/shared/config.md | 6 + docs/models/shared/createworkflow.md | 1 + .../destinationconnectorinformationconfig.md | 6 + .../models/shared/destinationconnectortype.md | 1 + docs/models/shared/nodefilemetadata.md | 4 +-
docs/models/shared/partitionparameters.md | 3 +- .../snowflakedestinationconnectorconfig.md | 2 +- ...nowflakedestinationconnectorconfiginput.md | 2 +- .../shared/snowflakesourceconnectorconfig.md | 2 +- .../snowflakesourceconnectorconfiginput.md | 2 +- docs/models/shared/templatedetail.md | 15 + docs/models/shared/templatelistitem.md | 14 + docs/models/shared/templatenode.md | 14 + .../updatedestinationconnectorconfig.md | 6 + docs/models/shared/updateworkflow.md | 1 + docs/models/shared/vlmmodel.md | 23 - docs/models/shared/workflowinformation.md | 2 +- docs/models/shared/workflowjobtype.md | 3 +- docs/sdks/general/README.md | 2 +- docs/sdks/jobs/README.md | 45 + docs/sdks/templates/README.md | 91 ++ gen.yaml | 13 + poetry.lock | 2 +- pyproject.toml | 1 + src/unstructured_client/_version.py | 6 +- src/unstructured_client/jobs.py | 206 +++++ .../models/operations/__init__.py | 42 + .../models/operations/create_job.py | 93 ++ .../models/operations/download_job_output.py | 15 +- .../models/operations/get_template.py | 89 ++ .../models/operations/list_templates.py | 90 ++ .../models/shared/__init__.py | 56 +- .../models/shared/astradbconnectorconfig.py | 6 +- .../shared/astradbconnectorconfiginput.py | 10 +- .../models/shared/body_create_job.py | 84 ++ .../models/shared/body_run_workflow.py | 8 +- .../shared/createdestinationconnector.py | 30 +- .../models/shared/createworkflow.py | 5 + ...ricksvdtdestinationconnectorconfiginput.py | 2 +- .../databricksvolumesconnectorconfiginput.py | 2 +- .../shared/destinationconnectorinformation.py | 18 +- .../models/shared/destinationconnectortype.py | 1 + .../models/shared/nodefilemetadata.py | 6 + .../models/shared/partition_parameters.py | 44 +- .../snowflakedestinationconnectorconfig.py | 14 +- ...nowflakedestinationconnectorconfiginput.py | 14 +- .../shared/snowflakesourceconnectorconfig.py | 8 +- .../snowflakesourceconnectorconfiginput.py | 8 +- .../models/shared/templatedetail.py | 34 + .../models/shared/templatelistitem.py | 29 + .../models/shared/templatenode.py | 67 ++ .../shared/updatedestinationconnector.py | 30 +- .../models/shared/updateworkflow.py | 5 + .../models/shared/workflowinformation.py | 8 +- .../models/shared/workflowjobtype.py | 1 + src/unstructured_client/sdk.py | 3 + src/unstructured_client/templates.py | 411 ++++++++ 75 files changed, 2551 insertions(+), 187 deletions(-) create mode 100644 docs/models/operations/createjobrequest.md create mode 100644 docs/models/operations/createjobresponse.md create mode 100644 docs/models/operations/gettemplaterequest.md create mode 100644 docs/models/operations/gettemplateresponse.md create mode 100644 docs/models/operations/listtemplatesrequest.md create mode 100644 docs/models/operations/listtemplatesresponse.md create mode 100644 docs/models/shared/bodycreatejob.md create mode 100644 docs/models/shared/bodyrunworkflowinputfiles.md create mode 100644 docs/models/shared/templatedetail.md create mode 100644 docs/models/shared/templatelistitem.md create mode 100644 docs/models/shared/templatenode.md delete mode 100644 docs/models/shared/vlmmodel.md create mode 100644 docs/sdks/templates/README.md create mode 100644 src/unstructured_client/models/operations/create_job.py create mode 100644 src/unstructured_client/models/operations/get_template.py create mode 100644 src/unstructured_client/models/operations/list_templates.py create mode 100644 src/unstructured_client/models/shared/body_create_job.py create mode 100644 src/unstructured_client/models/shared/templatedetail.py create mode 100644 
src/unstructured_client/models/shared/templatelistitem.py create mode 100644 src/unstructured_client/models/shared/templatenode.py create mode 100644 src/unstructured_client/templates.py diff --git a/.gitignore b/.gitignore index 14e788bb..51b89173 100755 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +.env.local **/__pycache__/ **/.speakeasy/temp/ **/.speakeasy/logs/ diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock index 578a0905..6982a41e 100755 --- a/.speakeasy/gen.lock +++ b/.speakeasy/gen.lock @@ -1,12 +1,12 @@ lockVersion: 2.0.0 id: 8b5fa338-9106-4734-abf0-e30d67044a90 management: - docChecksum: 8038a5b9e376d44bad2fd4c117922954 - docVersion: 1.1.45 + docChecksum: edc837c11b1a19b20f4f8e98bbf5c118 + docVersion: 1.2.21 speakeasyVersion: 1.601.0 generationVersion: 2.680.0 - releaseVersion: 0.42.3 - configChecksum: 3d02dd7ca437781b3d721fab5d7b9adc + releaseVersion: 0.42.4 + configChecksum: 3db8ad876dc10cf3b033f710eb1411d3 repoURL: https://github.com/Unstructured-IO/unstructured-python-client.git repoSubDirectory: . installationURL: https://github.com/Unstructured-IO/unstructured-python-client.git @@ -51,6 +51,8 @@ generatedFiles: - docs/models/operations/createconnectionchecksourcesresponse.md - docs/models/operations/createdestinationrequest.md - docs/models/operations/createdestinationresponse.md + - docs/models/operations/createjobrequest.md + - docs/models/operations/createjobresponse.md - docs/models/operations/createsourcerequest.md - docs/models/operations/createsourceresponse.md - docs/models/operations/createworkflowrequest.md @@ -77,6 +79,8 @@ generatedFiles: - docs/models/operations/getjobresponse.md - docs/models/operations/getsourcerequest.md - docs/models/operations/getsourceresponse.md + - docs/models/operations/gettemplaterequest.md + - docs/models/operations/gettemplateresponse.md - docs/models/operations/getworkflowrequest.md - docs/models/operations/getworkflowresponse.md - docs/models/operations/listdestinationsrequest.md @@ -85,6 +89,8 @@ generatedFiles: - docs/models/operations/listjobsresponse.md - docs/models/operations/listsourcesrequest.md - docs/models/operations/listsourcesresponse.md + - docs/models/operations/listtemplatesrequest.md + - docs/models/operations/listtemplatesresponse.md - docs/models/operations/listworkflowsrequest.md - docs/models/operations/listworkflowsresponse.md - docs/models/operations/partitionrequest.md @@ -101,9 +107,13 @@ generatedFiles: - docs/models/shared/astradbconnectorconfiginput.md - docs/models/shared/azureaisearchconnectorconfig.md - docs/models/shared/azureaisearchconnectorconfiginput.md + - docs/models/shared/azuredestinationconnectorconfig.md + - docs/models/shared/azuredestinationconnectorconfiginput.md - docs/models/shared/azuresourceconnectorconfig.md - docs/models/shared/azuresourceconnectorconfiginput.md + - docs/models/shared/bodycreatejob.md - docs/models/shared/bodyrunworkflow.md + - docs/models/shared/bodyrunworkflowinputfiles.md - docs/models/shared/boxsourceconnectorconfig.md - docs/models/shared/boxsourceconnectorconfiginput.md - docs/models/shared/config.md @@ -205,6 +215,9 @@ generatedFiles: - docs/models/shared/sourceconnectorinformationconfig.md - docs/models/shared/sourceconnectortype.md - docs/models/shared/strategy.md + - docs/models/shared/templatedetail.md + - docs/models/shared/templatelistitem.md + - docs/models/shared/templatenode.md - docs/models/shared/updatedestinationconnector.md - docs/models/shared/updatedestinationconnectorconfig.md - docs/models/shared/updatesourceconnector.md @@ 
-212,7 +225,6 @@ generatedFiles: - docs/models/shared/updateworkflow.md - docs/models/shared/updateworkflowschedule.md - docs/models/shared/validationerror.md - - docs/models/shared/vlmmodel.md - docs/models/shared/vlmmodelprovider.md - docs/models/shared/weaviatedestinationconnectorconfig.md - docs/models/shared/weaviatedestinationconnectorconfiginput.md @@ -229,6 +241,7 @@ generatedFiles: - docs/sdks/general/README.md - docs/sdks/jobs/README.md - docs/sdks/sources/README.md + - docs/sdks/templates/README.md - docs/sdks/unstructuredclient/README.md - docs/sdks/workflows/README.md - poetry.toml @@ -259,6 +272,7 @@ generatedFiles: - src/unstructured_client/models/operations/create_connection_check_destinations.py - src/unstructured_client/models/operations/create_connection_check_sources.py - src/unstructured_client/models/operations/create_destination.py + - src/unstructured_client/models/operations/create_job.py - src/unstructured_client/models/operations/create_source.py - src/unstructured_client/models/operations/create_workflow.py - src/unstructured_client/models/operations/delete_destination.py @@ -272,10 +286,12 @@ generatedFiles: - src/unstructured_client/models/operations/get_job_details.py - src/unstructured_client/models/operations/get_job_failed_files.py - src/unstructured_client/models/operations/get_source.py + - src/unstructured_client/models/operations/get_template.py - src/unstructured_client/models/operations/get_workflow.py - src/unstructured_client/models/operations/list_destinations.py - src/unstructured_client/models/operations/list_jobs.py - src/unstructured_client/models/operations/list_sources.py + - src/unstructured_client/models/operations/list_templates.py - src/unstructured_client/models/operations/list_workflows.py - src/unstructured_client/models/operations/partition.py - src/unstructured_client/models/operations/run_workflow.py @@ -287,8 +303,11 @@ generatedFiles: - src/unstructured_client/models/shared/astradbconnectorconfiginput.py - src/unstructured_client/models/shared/azureaisearchconnectorconfig.py - src/unstructured_client/models/shared/azureaisearchconnectorconfiginput.py + - src/unstructured_client/models/shared/azuredestinationconnectorconfig.py + - src/unstructured_client/models/shared/azuredestinationconnectorconfiginput.py - src/unstructured_client/models/shared/azuresourceconnectorconfig.py - src/unstructured_client/models/shared/azuresourceconnectorconfiginput.py + - src/unstructured_client/models/shared/body_create_job.py - src/unstructured_client/models/shared/body_run_workflow.py - src/unstructured_client/models/shared/boxsourceconnectorconfig.py - src/unstructured_client/models/shared/boxsourceconnectorconfiginput.py @@ -379,6 +398,9 @@ generatedFiles: - src/unstructured_client/models/shared/sortdirection.py - src/unstructured_client/models/shared/sourceconnectorinformation.py - src/unstructured_client/models/shared/sourceconnectortype.py + - src/unstructured_client/models/shared/templatedetail.py + - src/unstructured_client/models/shared/templatelistitem.py + - src/unstructured_client/models/shared/templatenode.py - src/unstructured_client/models/shared/updatedestinationconnector.py - src/unstructured_client/models/shared/updatesourceconnector.py - src/unstructured_client/models/shared/updateworkflow.py @@ -397,6 +419,7 @@ generatedFiles: - src/unstructured_client/sdk.py - src/unstructured_client/sdkconfiguration.py - src/unstructured_client/sources.py + - src/unstructured_client/templates.py - src/unstructured_client/types/__init__.py - 
src/unstructured_client/types/basemodel.py - src/unstructured_client/utils/__init__.py @@ -600,7 +623,7 @@ examples: application/json: {"name": "", "workflow_type": "advanced"} responses: "200": - application/json: {"created_at": "2023-01-15T13:52:58.634Z", "destinations": [], "id": "80c8c72f-101a-4a39-974b-49aa299e80d3", "name": "", "schedule": {"crontab_entries": [{"cron_expression": "0 0 * * *"}]}, "sources": ["47678eb0-4958-4341-ad66-b1b822c1deb9"], "status": "active", "workflow_nodes": [{"name": "", "subtype": "", "type": ""}, {"name": "", "subtype": "", "type": ""}]} + application/json: {"created_at": "2023-01-15T13:52:58.634Z", "destinations": [], "id": "80c8c72f-101a-4a39-974b-49aa299e80d3", "name": "", "reprocess_all": false, "schedule": {"crontab_entries": [{"cron_expression": "0 0 * * *"}]}, "sources": ["47678eb0-4958-4341-ad66-b1b822c1deb9"], "status": "active", "workflow_nodes": [{"name": "", "subtype": "", "type": ""}, {"name": "", "subtype": "", "type": ""}]} "422": application/json: {"detail": ""} delete_workflow: @@ -620,7 +643,7 @@ examples: workflow_id: "d031b0e5-7ca7-4a2b-b3cc-d869d2df3e76" responses: "200": - application/json: {"created_at": "2024-01-22T17:42:53.375Z", "destinations": ["624abbf1-dbce-4fe0-8d3f-0673956fc10e"], "id": "840c33c3-f30c-4908-9688-2682ed8eea3f", "name": "", "schedule": {"crontab_entries": [{"cron_expression": "0 0 * * *"}]}, "sources": ["349ed27a-eeda-4851-866a-8ce3a65a2957", "9aa34544-3278-446f-a265-efcaa55f9c4a"], "status": "active", "workflow_nodes": [{"name": "", "subtype": "", "type": ""}, {"name": "", "subtype": "", "type": ""}]} + application/json: {"created_at": "2024-01-22T17:42:53.375Z", "destinations": ["624abbf1-dbce-4fe0-8d3f-0673956fc10e"], "id": "840c33c3-f30c-4908-9688-2682ed8eea3f", "name": "", "reprocess_all": false, "schedule": {"crontab_entries": [{"cron_expression": "0 0 * * *"}]}, "sources": ["349ed27a-eeda-4851-866a-8ce3a65a2957", "9aa34544-3278-446f-a265-efcaa55f9c4a"], "status": "active", "workflow_nodes": [{"name": "", "subtype": "", "type": ""}, {"name": "", "subtype": "", "type": ""}]} "422": application/json: {"detail": ""} list_workflows: @@ -630,7 +653,7 @@ examples: sort_by: "id" responses: "200": - application/json: [{"created_at": "2023-09-02T04:21:23.162Z", "destinations": [], "id": "1e4c63cf-6a6b-407b-b993-b2bc8b3bcefc", "name": "", "schedule": {"crontab_entries": [{"cron_expression": "0 0 * * *"}]}, "sources": ["2e12d926-4caf-4cce-8b5c-3e16722c1c84"], "status": "inactive", "workflow_nodes": [{"name": "", "subtype": "", "type": ""}, {"name": "", "subtype": "", "type": ""}]}, {"created_at": "2023-09-02T04:21:23.162Z", "destinations": [], "id": "1e4c63cf-6a6b-407b-b993-b2bc8b3bcefc", "name": "", "schedule": {"crontab_entries": [{"cron_expression": "0 0 * * *"}]}, "sources": ["2e12d926-4caf-4cce-8b5c-3e16722c1c84"], "status": "inactive", "workflow_nodes": [{"name": "", "subtype": "", "type": ""}, {"name": "", "subtype": "", "type": ""}]}] + application/json: [{"created_at": "2023-09-02T04:21:23.162Z", "destinations": [], "id": "1e4c63cf-6a6b-407b-b993-b2bc8b3bcefc", "name": "", "reprocess_all": false, "schedule": {"crontab_entries": [{"cron_expression": "0 0 * * *"}]}, "sources": ["2e12d926-4caf-4cce-8b5c-3e16722c1c84"], "status": "inactive", "workflow_nodes": [{"name": "", "subtype": "", "type": ""}, {"name": "", "subtype": "", "type": ""}]}, {"created_at": "2023-09-02T04:21:23.162Z", "destinations": [], "id": "1e4c63cf-6a6b-407b-b993-b2bc8b3bcefc", "name": "", "reprocess_all": false, "schedule": 
{"crontab_entries": [{"cron_expression": "0 0 * * *"}]}, "sources": ["2e12d926-4caf-4cce-8b5c-3e16722c1c84"], "status": "inactive", "workflow_nodes": [{"name": "", "subtype": "", "type": ""}, {"name": "", "subtype": "", "type": ""}]}] "422": application/json: {"detail": [{"loc": ["", 864403], "msg": "", "type": ""}]} run_workflow: @@ -652,13 +675,13 @@ examples: application/json: {} responses: "200": - application/json: {"created_at": "2025-01-11T03:06:48.390Z", "destinations": [], "id": "88fd9cfe-ed29-4701-9d1f-8f26d4ea88d3", "name": "", "schedule": {"crontab_entries": [{"cron_expression": "0 0 * * *"}]}, "sources": ["0957eb7f-57d3-4705-bc49-03de6ad3794d", "a8b88099-9065-4458-a824-b37da7a9bccf", "a024c482-10ba-433a-9100-e6471e9d3db0"], "status": "active", "workflow_nodes": [{"name": "", "subtype": "", "type": ""}]} + application/json: {"created_at": "2025-01-11T03:06:48.390Z", "destinations": [], "id": "88fd9cfe-ed29-4701-9d1f-8f26d4ea88d3", "name": "", "reprocess_all": false, "schedule": {"crontab_entries": [{"cron_expression": "0 0 * * *"}]}, "sources": ["0957eb7f-57d3-4705-bc49-03de6ad3794d", "a8b88099-9065-4458-a824-b37da7a9bccf", "a024c482-10ba-433a-9100-e6471e9d3db0"], "status": "active", "workflow_nodes": [{"name": "", "subtype": "", "type": ""}]} "422": application/json: {"detail": ""} partition: speakeasy-default-partition: requestBody: - multipart/form-data: {"chunking_strategy": "by_title", "combine_under_n_chars": null, "content_type": null, "coordinates": false, "encoding": null, "files": {"": "{\"summary\":\"File to be partitioned\",\"externalValue\":\"https://github.com/Unstructured-IO/unstructured/blob/98d3541909f64290b5efb65a226fc3ee8a7cc5ee/example-docs/layout-parser-paper.pdf\"}"}, "gz_uncompressed_content_type": null, "hi_res_model_name": null, "include_orig_elements": null, "include_page_breaks": false, "include_slide_notes": true, "max_characters": null, "multipage_sections": true, "new_after_n_chars": null, "output_format": "application/json", "overlap": 0, "overlap_all": false, "pdf_infer_table_structure": true, "pdfminer_char_margin": null, "pdfminer_line_margin": null, "pdfminer_line_overlap": null, "pdfminer_word_margin": null, "similarity_threshold": null, "split_pdf_allow_failed": false, "split_pdf_cache_tmp_data": false, "split_pdf_cache_tmp_data_dir": "", "split_pdf_concurrency_level": 5, "split_pdf_page": true, "split_pdf_page_range": [1, 10], "starting_page_number": null, "strategy": "auto", "table_ocr_agent": null, "unique_element_ids": false, "vlm_model": "gpt-4o", "vlm_model_provider": "openai", "xml_keep_tags": false} + multipart/form-data: {"chunking_strategy": "by_title", "combine_under_n_chars": null, "content_type": null, "coordinates": false, "do_not_break_similarity_on_footer_header": false, "encoding": null, "files": {"": "{\"summary\":\"File to be partitioned\",\"externalValue\":\"https://github.com/Unstructured-IO/unstructured/blob/98d3541909f64290b5efb65a226fc3ee8a7cc5ee/example-docs/layout-parser-paper.pdf\"}"}, "gz_uncompressed_content_type": null, "hi_res_model_name": null, "include_orig_elements": null, "include_page_breaks": false, "include_slide_notes": true, "max_characters": null, "multipage_sections": true, "new_after_n_chars": null, "output_format": "application/json", "overlap": 0, "overlap_all": false, "pdf_infer_table_structure": true, "pdfminer_char_margin": null, "pdfminer_line_margin": null, "pdfminer_line_overlap": null, "pdfminer_word_margin": null, "similarity_threshold": null, "split_pdf_allow_failed": false, 
"split_pdf_cache_tmp_data": false, "split_pdf_cache_tmp_data_dir": "", "split_pdf_concurrency_level": 5, "split_pdf_page": true, "split_pdf_page_range": [1, 10], "starting_page_number": null, "strategy": "auto", "table_ocr_agent": null, "unique_element_ids": false, "vlm_model": "gpt-4o", "vlm_model_provider": "openai", "xml_keep_tags": false} responses: "200": application/json: [{"type": "Title", "element_id": "6aa0ff22f91bbe7e26e8e25ca8052acd", "text": "LayoutParser: A Unified Toolkit for Deep Learning Based Document Image Analysis", "metadata": {"languages": ["eng"], "page_number": 1, "filename": "layout-parser-paper.pdf", "filetype": "application/pdf"}}] @@ -710,5 +733,836 @@ examples: application/json: {"ca_chain": "", "encryption_certificate": ""} "422": application/json: {"detail": []} + create_job: + speakeasy-default-create-job: + requestBody: + multipart/form-data: {"request_data": ""} + responses: + "200": + application/json: {"created_at": "2024-02-18T20:31:27.125Z", "id": "55723ca1-8ca8-4beb-bf8a-b2c41cc6649d", "status": "IN_PROGRESS", "workflow_id": "3f012d20-710b-43a2-8577-22a817a1352a", "workflow_name": ""} + "422": + application/json: {"detail": [{"loc": [""], "msg": "", "type": ""}]} + get_template: + speakeasy-default-get-template: + parameters: + path: + template_id: "" + responses: + "200": + application/json: {"description": "probate legging nor lest wrathful oddly importance metabolite to tricky", "id": "", "last_updated": "", "name": "", "nodes": [], "version": ""} + "422": + application/json: {"detail": ""} + list_templates: + speakeasy-default-list-templates: + responses: + "200": + application/json: [{"description": "without license expostulate gloss specific through huzzah", "id": "", "last_updated": "", "name": "", "version": ""}] + "422": + application/json: {"detail": [{"loc": [""], "msg": "", "type": ""}]} examplesVersion: 1.0.2 generatedTests: {} +trackedFiles: + .gitattributes: + last_write_checksum: sha1:53134de3ada576f37c22276901e1b5b6d85cd2da + .vscode/settings.json: + last_write_checksum: sha1:f84632c81029fcdda8c3b0c768d02b836fc80526 + USAGE.md: + last_write_checksum: sha1:634dd7b8ad241a00d0c69649194b8149006a311f + docs/models/errors/detail.md: + last_write_checksum: sha1:9a864f8e5a23346f45054850eba4f02b4ddfa65e + docs/models/errors/httpvalidationerror.md: + last_write_checksum: sha1:1b74d78fb94bad6150ddd93c4cb937672c970cab + docs/models/errors/servererror.md: + last_write_checksum: sha1:ba10866750f76c3c6b3b1face75aa37a517a8647 + docs/models/operations/canceljobrequest.md: + last_write_checksum: sha1:c68b3b85e0f5d40e4c3b89d2535f2fca286a9d0f + docs/models/operations/canceljobresponse.md: + last_write_checksum: sha1:ec0f1b7ab6075a10ea1dab44b6d3fa6b3872751c + docs/models/operations/createconnectioncheckdestinationsrequest.md: + last_write_checksum: sha1:01884e8ff647912b15633806207dd151c5ab607b + docs/models/operations/createconnectioncheckdestinationsresponse.md: + last_write_checksum: sha1:b64309e1d90d873a7ffc2ea2226848195144938a + docs/models/operations/createconnectionchecksourcesrequest.md: + last_write_checksum: sha1:fecf135c6381a78caf6f461fb55228350356860b + docs/models/operations/createconnectionchecksourcesresponse.md: + last_write_checksum: sha1:56e30e0c8459179cc3a31b01797006652c90bfc8 + docs/models/operations/createdestinationrequest.md: + last_write_checksum: sha1:85e0a1d2aa09c5df3c53c1f1d7f3090270894a44 + docs/models/operations/createdestinationresponse.md: + last_write_checksum: sha1:38f36c08f2331bcfbd84cdc48bf67f987ff92d3b + 
docs/models/operations/createjobrequest.md: + last_write_checksum: sha1:0e59082844b538644c4800826cfe4df866cd268c + docs/models/operations/createjobresponse.md: + last_write_checksum: sha1:02c375249682eeb195fe10456ad993d24689e878 + docs/models/operations/createsourcerequest.md: + last_write_checksum: sha1:ffad0297c13b3296896555d0684897d1b703806f + docs/models/operations/createsourceresponse.md: + last_write_checksum: sha1:d2fb364df743675b0da662b11f0d905be464d240 + docs/models/operations/createworkflowrequest.md: + last_write_checksum: sha1:b033be68bc9a54bfe5ba70e02f8ad2d6c0fe28fa + docs/models/operations/createworkflowresponse.md: + last_write_checksum: sha1:1e4a1313e118e8831682e28eebf38485b557575b + docs/models/operations/deletedestinationrequest.md: + last_write_checksum: sha1:6ec7ccc6902e45b3fc6408daa324dfb6d4e8cf46 + docs/models/operations/deletedestinationresponse.md: + last_write_checksum: sha1:327ddf8e2d6b50d79d232ad715b3efb34e3f20ec + docs/models/operations/deletesourcerequest.md: + last_write_checksum: sha1:e95a8d22ad72eabcbdd0dbd69b11f819c401848e + docs/models/operations/deletesourceresponse.md: + last_write_checksum: sha1:36c02dde611385af13b4b0fb28e6852aa3dbd584 + docs/models/operations/deleteworkflowrequest.md: + last_write_checksum: sha1:d1cd1549e20332931005cf47ec708ea8856884a5 + docs/models/operations/deleteworkflowresponse.md: + last_write_checksum: sha1:3c48310a3847c7ebb11b26857e54dd2ee3588305 + docs/models/operations/downloadjoboutputrequest.md: + last_write_checksum: sha1:06db3b4bb86be7ec09d580f7c9d2e104f6d50499 + docs/models/operations/downloadjoboutputresponse.md: + last_write_checksum: sha1:ea5ba518eb952b0de1653e70c0f43f444f07a47c + docs/models/operations/getconnectioncheckdestinationsrequest.md: + last_write_checksum: sha1:6b014c77cf8c3341375726e4d3dce855e896b91d + docs/models/operations/getconnectioncheckdestinationsresponse.md: + last_write_checksum: sha1:40e396296a2cbd3be62a339f91b03ef085690d47 + docs/models/operations/getconnectionchecksourcesrequest.md: + last_write_checksum: sha1:4049e5ba7003d344d47ecbca4f6b1f2728cca2b9 + docs/models/operations/getconnectionchecksourcesresponse.md: + last_write_checksum: sha1:0ffceea34fd8ebce0af8a8e3161ed63b659beef4 + docs/models/operations/getdestinationrequest.md: + last_write_checksum: sha1:389f39364c31628df5e602fdc575d78e645ff13d + docs/models/operations/getdestinationresponse.md: + last_write_checksum: sha1:0be5580dc7f3ac4c6ffac9e6cbaa1daf793d391f + docs/models/operations/getjobdetailsrequest.md: + last_write_checksum: sha1:2664e62ce031f0dc61b43f11e855a4e68d6612cc + docs/models/operations/getjobdetailsresponse.md: + last_write_checksum: sha1:e369dae7b58be400a7bdca788e8e024e4439e805 + docs/models/operations/getjobfailedfilesrequest.md: + last_write_checksum: sha1:a6e065b000919f81529227573f5043e7af1a38e4 + docs/models/operations/getjobfailedfilesresponse.md: + last_write_checksum: sha1:1809c62a4d1667857d2e909fe55e72fce25a1bfc + docs/models/operations/getjobrequest.md: + last_write_checksum: sha1:d6f9762d1974bba9ba385ed73c0e575c26f6a59a + docs/models/operations/getjobresponse.md: + last_write_checksum: sha1:a8ebec5c661f78deb38cd8800ab2a1b0fb38b567 + docs/models/operations/getsourcerequest.md: + last_write_checksum: sha1:13f5a06f370562ce113a820a8df2ef8b3f97911e + docs/models/operations/getsourceresponse.md: + last_write_checksum: sha1:49f545d04b85e40a57491ccaaae28987361082f3 + docs/models/operations/gettemplaterequest.md: + last_write_checksum: sha1:ad56d4ee7fd567e09114e2b2701c9da6b11d7476 + 
docs/models/operations/gettemplateresponse.md: + last_write_checksum: sha1:d8bd2e7d255eaa1aba884a1083fa7867c59b19ee + docs/models/operations/getworkflowrequest.md: + last_write_checksum: sha1:dd6e1bb20c7e1f7ac2946c5bb8bb6efb1ec94bf7 + docs/models/operations/getworkflowresponse.md: + last_write_checksum: sha1:f359eaa994b59e72a95cf6be87cd3379bcbfbeeb + docs/models/operations/listdestinationsrequest.md: + last_write_checksum: sha1:41447ab03579341643950e5836cd2a593679208d + docs/models/operations/listdestinationsresponse.md: + last_write_checksum: sha1:949a7da6768f75840b3cd77f358adf73e9d0399a + docs/models/operations/listjobsrequest.md: + last_write_checksum: sha1:5c6a7d959fecf0ff29cc82551d12de4faaf7c693 + docs/models/operations/listjobsresponse.md: + last_write_checksum: sha1:03ecaf122b7b50667b47d18c77e319ded7637e49 + docs/models/operations/listsourcesrequest.md: + last_write_checksum: sha1:d8b44188c43e7bdcc4b303f1d53396a65e0bb74c + docs/models/operations/listsourcesresponse.md: + last_write_checksum: sha1:6500b17470a02391741239e468b215cdd33a92c0 + docs/models/operations/listtemplatesrequest.md: + last_write_checksum: sha1:ec8b34ed22a760c0c6d61324b177a92f406ca5f1 + docs/models/operations/listtemplatesresponse.md: + last_write_checksum: sha1:08c1a441c1da8b72515eda1758eb2ac41a4ad3a8 + docs/models/operations/listworkflowsrequest.md: + last_write_checksum: sha1:7ccc6cd6c196cbd799aaed2a6b0230e504944ee0 + docs/models/operations/listworkflowsresponse.md: + last_write_checksum: sha1:e733995a841620e81a5f38697097d39d72ecb716 + docs/models/operations/partitionrequest.md: + last_write_checksum: sha1:54014e6854cf8220086322d14cbd6d449d591ce4 + docs/models/operations/partitionresponse.md: + last_write_checksum: sha1:1d0bde8a9fa4ec117ec698a533cc118c0ff0a796 + docs/models/operations/runworkflowrequest.md: + last_write_checksum: sha1:89a954c721a0070cf45fe530edecc8c9bc3a1e36 + docs/models/operations/runworkflowresponse.md: + last_write_checksum: sha1:87b2eb22401bab45a7e7a424adfe0f4dbb2c212e + docs/models/operations/updatedestinationrequest.md: + last_write_checksum: sha1:211264534040d2c9de960b0fe99532c52f6d70f5 + docs/models/operations/updatedestinationresponse.md: + last_write_checksum: sha1:4902193056c6ef6189d13d35ea62524a8216598f + docs/models/operations/updatesourcerequest.md: + last_write_checksum: sha1:61c75282f5df6151587ea2c3b0358056bcd6f39b + docs/models/operations/updatesourceresponse.md: + last_write_checksum: sha1:2b26c5966a7627625a25e57cab0dbaa5af2f47b2 + docs/models/operations/updateworkflowrequest.md: + last_write_checksum: sha1:177a57e200d961ccd8dcf3d1cd21ad9f35d4ec64 + docs/models/operations/updateworkflowresponse.md: + last_write_checksum: sha1:1336948ebad20ffe8e11a836cf0b9faf1d2c0fb4 + docs/models/shared/astradbconnectorconfig.md: + last_write_checksum: sha1:4a8f8de642ce6e995d5cb80b95a87acc57754732 + docs/models/shared/astradbconnectorconfiginput.md: + last_write_checksum: sha1:4e7282d2f5522f401ae02c55ffc7b0a24ca22c95 + docs/models/shared/azureaisearchconnectorconfig.md: + last_write_checksum: sha1:ed41a88fb6bddcc2094f9d8b5660dedba9921969 + docs/models/shared/azureaisearchconnectorconfiginput.md: + last_write_checksum: sha1:1d2f3f886e12e87cb5c46110e871be3e12b4061b + docs/models/shared/azuredestinationconnectorconfig.md: + last_write_checksum: sha1:b5d8532ce7f18b39181f20640385668587253077 + docs/models/shared/azuredestinationconnectorconfiginput.md: + last_write_checksum: sha1:49e9b15d109256f679de41975a7f0e5a0eb9b242 + docs/models/shared/azuresourceconnectorconfig.md: + last_write_checksum: 
sha1:32b34b7b09d4577be2ddc9324eebee18d80f6fc1 + docs/models/shared/azuresourceconnectorconfiginput.md: + last_write_checksum: sha1:b8c68a56bef1a7e09bf1c55953a1f26a23c3da7e + docs/models/shared/bodycreatejob.md: + last_write_checksum: sha1:f335b69b4915d22989edbbcbe96d5549b389d678 + docs/models/shared/bodyrunworkflow.md: + last_write_checksum: sha1:961071391ef67ad866f6cb4acca4d43d0d939c45 + docs/models/shared/bodyrunworkflowinputfiles.md: + last_write_checksum: sha1:578c4d054cfb283e3e37ee72a36aa01a54da5aca + docs/models/shared/boxsourceconnectorconfig.md: + last_write_checksum: sha1:b000010f3d53f4ec609f9c1e9bcf75d0907843fb + docs/models/shared/boxsourceconnectorconfiginput.md: + last_write_checksum: sha1:e75625a2d8e1092d752dec9d6416ca2dc7ece940 + docs/models/shared/config.md: + last_write_checksum: sha1:9b8db306af8207882a98a06397fb1df25514a5d5 + docs/models/shared/confluencesourceconnectorconfig.md: + last_write_checksum: sha1:f0fc3f07b8dfe0af11928df4f8246168500ec3bb + docs/models/shared/confluencesourceconnectorconfiginput.md: + last_write_checksum: sha1:db8a7e7f5f4d5b231fe8cc4de99f848f4cd6ffb1 + docs/models/shared/connectioncheckstatus.md: + last_write_checksum: sha1:db2996b0cf9d1ac947825fae0072fcd70c2ab374 + docs/models/shared/couchbasedestinationconnectorconfig.md: + last_write_checksum: sha1:f8eef3f3b5aebe168049a683018d9d68e74fc4d6 + docs/models/shared/couchbasedestinationconnectorconfiginput.md: + last_write_checksum: sha1:ecbd67c5513db6864aa3096120442104dac98188 + docs/models/shared/couchbasesourceconnectorconfig.md: + last_write_checksum: sha1:aeb00df6171f1642d9322bbb69c932432de6aebe + docs/models/shared/couchbasesourceconnectorconfiginput.md: + last_write_checksum: sha1:11c20ae7633c07a8e12d80d60ef57c0e79799d79 + docs/models/shared/createdestinationconnector.md: + last_write_checksum: sha1:22cf73e4cf6ca7e97adb7973b2a83ae900039269 + docs/models/shared/createsourceconnector.md: + last_write_checksum: sha1:006ad810becfd9f2d7c8e8ff5de3cb0d0815bf72 + docs/models/shared/createsourceconnectorconfig.md: + last_write_checksum: sha1:29acb21f56b4ddfa16c67240522411fda161afdc + docs/models/shared/createworkflow.md: + last_write_checksum: sha1:8aaf8738ef81e5c333b21e0052015755d21fdb1a + docs/models/shared/crontabentry.md: + last_write_checksum: sha1:2e315fb85c5d83ba9190bf0321c84a09b94dc167 + docs/models/shared/dagnodeconnectioncheck.md: + last_write_checksum: sha1:30ce6ae84a9a21ce8fe4227e7a92445bf5aabcf1 + docs/models/shared/databricksvdtdestinationconnectorconfig.md: + last_write_checksum: sha1:defcbc8a8b513bb619e7c794d9d1b3c230988629 + docs/models/shared/databricksvdtdestinationconnectorconfiginput.md: + last_write_checksum: sha1:f820591adce85b73f0bda0d805c80f4d7cfdd167 + docs/models/shared/databricksvolumesconnectorconfig.md: + last_write_checksum: sha1:5b329fba41166cc85d8f5d3d20df073b5ef3b64c + docs/models/shared/databricksvolumesconnectorconfiginput.md: + last_write_checksum: sha1:8d542e8451b21c518c483f92a84f14971c436d0e + docs/models/shared/deltatableconnectorconfig.md: + last_write_checksum: sha1:91f5278c5797015375fdad027a555dc096d28699 + docs/models/shared/deltatableconnectorconfiginput.md: + last_write_checksum: sha1:fd4a2cc65ff8276774e8b4c335863c082e140d2b + docs/models/shared/destinationconnectorinformation.md: + last_write_checksum: sha1:3ee4e344b304ba428401ae1c74f1aa5bcdc556da + docs/models/shared/destinationconnectorinformationconfig.md: + last_write_checksum: sha1:52df1d0ee4ad0ec7cfd1698b5a72f39a93f00806 + docs/models/shared/destinationconnectortype.md: + last_write_checksum: 
sha1:3b405c8485530eb259e1607d89af14cf94922526 + docs/models/shared/dropboxsourceconnectorconfig.md: + last_write_checksum: sha1:dd236db60bdbbda1422cc81ec574fe9f3f7f4ce3 + docs/models/shared/dropboxsourceconnectorconfiginput.md: + last_write_checksum: sha1:4f22e588c4855d715876878c6f49e83a9e538870 + docs/models/shared/elasticsearchconnectorconfig.md: + last_write_checksum: sha1:f23bb0fdf87d78a2becc35bb6f2bae133f9d534e + docs/models/shared/elasticsearchconnectorconfiginput.md: + last_write_checksum: sha1:e15f36fe04157454dd5650805ba86f3e822b70a5 + docs/models/shared/encryptiontype.md: + last_write_checksum: sha1:687c0937bd9690e37e9958adaa25a4299d109ba5 + docs/models/shared/failedfile.md: + last_write_checksum: sha1:60da6ce6eb8d531fcb77945d1f2be5a41fe56156 + docs/models/shared/files.md: + last_write_checksum: sha1:3e1b008da7152dbb0849e8d39190773f89068342 + docs/models/shared/gcsdestinationconnectorconfig.md: + last_write_checksum: sha1:99908999197b2b2cc2b84614cae7880824de5f19 + docs/models/shared/gcsdestinationconnectorconfiginput.md: + last_write_checksum: sha1:eebb8063d181f6c29f6af21e50a54bbdc2048332 + docs/models/shared/gcssourceconnectorconfig.md: + last_write_checksum: sha1:d67bf6a4572d8d812c86f6861aaa2aef1ff21115 + docs/models/shared/gcssourceconnectorconfiginput.md: + last_write_checksum: sha1:81c8454525972ec3a058d532cf6c32180e2479f6 + docs/models/shared/googledrivesourceconnectorconfig.md: + last_write_checksum: sha1:888a74236ee6e3099bee7193f10022b27d5e19f8 + docs/models/shared/googledrivesourceconnectorconfiginput.md: + last_write_checksum: sha1:e866f029785a3e71230397080ef83a7d407e89f4 + docs/models/shared/googledrivesourceconnectorconfiginputserviceaccountkey.md: + last_write_checksum: sha1:e7ce3dbb4fefc0102792980dd00761438b7de958 + docs/models/shared/ibmwatsonxs3destinationconnectorconfig.md: + last_write_checksum: sha1:9166cd4e8965f1cc1f4e2f987050e607517e56a2 + docs/models/shared/ibmwatsonxs3destinationconnectorconfiginput.md: + last_write_checksum: sha1:c78f1ac09209fcd1a701db0100691fcb2d776f8e + docs/models/shared/inputfiles.md: + last_write_checksum: sha1:ca7389eeaa181029d40e7816340183113264f250 + docs/models/shared/jirasourceconnectorconfig.md: + last_write_checksum: sha1:e8e593dbc957cf9b0a069e00af628a9ae859c572 + docs/models/shared/jirasourceconnectorconfiginput.md: + last_write_checksum: sha1:063631ef9f815bdd6c143d8863ec465d196d430b + docs/models/shared/jobdetails.md: + last_write_checksum: sha1:c783b6beb703c315043fb9183943262b130fdcb7 + docs/models/shared/jobfailedfiles.md: + last_write_checksum: sha1:5e68b017291bca72e366c3d69795ead12a9ad8fe + docs/models/shared/jobinformation.md: + last_write_checksum: sha1:b7c49fd6e05c92ff17978cd82ea458557f6a9796 + docs/models/shared/jobnodedetails.md: + last_write_checksum: sha1:89ee02ecdf254a2334f2c1c86daab5d3dbc8a1f5 + docs/models/shared/jobprocessingstatus.md: + last_write_checksum: sha1:c1e69d7d8d42feb9f95e95a3feff0a6a5d5294be + docs/models/shared/jobstatus.md: + last_write_checksum: sha1:4e577625cd90e4a2d859d0a5746e8ae37b8a46c7 + docs/models/shared/kafkaclouddestinationconnectorconfig.md: + last_write_checksum: sha1:e765038bd3fb87bb9e9431f212a6b67e7023c1ec + docs/models/shared/kafkaclouddestinationconnectorconfiginput.md: + last_write_checksum: sha1:6362b1a0ab47db453e9e5051a1ff035b507e6a3e + docs/models/shared/kafkacloudsourceconnectorconfig.md: + last_write_checksum: sha1:cbe83f080cb55d50b00e1eacfa2aba870d083100 + docs/models/shared/kafkacloudsourceconnectorconfiginput.md: + last_write_checksum: 
sha1:6a80f801c0d2710b36a3ee475c9204bdacbfa07c + docs/models/shared/loc.md: + last_write_checksum: sha1:09a04749333ab50ae806c3ac6adcaa90d54df0f1 + docs/models/shared/milvusdestinationconnectorconfig.md: + last_write_checksum: sha1:cb8e3b9c8975429537df3ace8214261df7bec607 + docs/models/shared/milvusdestinationconnectorconfiginput.md: + last_write_checksum: sha1:a865313b317fbd1750189e1244df02a8e194c756 + docs/models/shared/mongodbconnectorconfig.md: + last_write_checksum: sha1:999713952b951e58452e16ef2654f446c3586f23 + docs/models/shared/mongodbconnectorconfiginput.md: + last_write_checksum: sha1:281424ee9db5e1b171c9a26f994ce3ff4238bb49 + docs/models/shared/neo4jdestinationconnectorconfig.md: + last_write_checksum: sha1:92461e51cfea8bd7cd1cbca31a4fd31fa67d3019 + docs/models/shared/neo4jdestinationconnectorconfiginput.md: + last_write_checksum: sha1:82a1d34bca150ca42fc1dfb670e51dc7480e3a33 + docs/models/shared/nodefilemetadata.md: + last_write_checksum: sha1:51a991310b3ba8c9f68f2ea3e30afc2745948ef0 + docs/models/shared/onedrivedestinationconnectorconfig.md: + last_write_checksum: sha1:15ec1487ed4368dfe742be4765538becc0f76668 + docs/models/shared/onedrivedestinationconnectorconfiginput.md: + last_write_checksum: sha1:b5ce374e6e49566481d8c338824587a51986bef0 + docs/models/shared/onedrivesourceconnectorconfig.md: + last_write_checksum: sha1:d0cc7e551dc0509a6f04cd42f2ab3ef58c41dfc7 + docs/models/shared/onedrivesourceconnectorconfiginput.md: + last_write_checksum: sha1:415b2ce792db84e2404226b87091d3051e0bea50 + docs/models/shared/outlooksourceconnectorconfig.md: + last_write_checksum: sha1:2094d7b13d7ee1d4b8b1f93442b68fd4156b13bc + docs/models/shared/outlooksourceconnectorconfiginput.md: + last_write_checksum: sha1:01b672957522d39ba725eb6288df1972607a989b + docs/models/shared/outputformat.md: + last_write_checksum: sha1:2b7d4a5b7b7dc7fd445fbb1e6d2ade4e1fb00ff9 + docs/models/shared/partitionparameters.md: + last_write_checksum: sha1:97be0eccf54324ad0952418803d3d41bb8fd8933 + docs/models/shared/pineconedestinationconnectorconfig.md: + last_write_checksum: sha1:ff6972c9db29b7831dc715cbba7e56809c7515d8 + docs/models/shared/pineconedestinationconnectorconfiginput.md: + last_write_checksum: sha1:84ce5dabb9ba7624e8dd208c675275aa62152f0c + docs/models/shared/postgresdestinationconnectorconfig.md: + last_write_checksum: sha1:714cbab157bfbae86c7bd76859da844e37c21e8e + docs/models/shared/postgresdestinationconnectorconfiginput.md: + last_write_checksum: sha1:799b15a8b42720e50af6ec3a6ef831a94ec905de + docs/models/shared/postgressourceconnectorconfig.md: + last_write_checksum: sha1:2fc63c65296ffaea07d914ce6b287ac59bd295fe + docs/models/shared/postgressourceconnectorconfiginput.md: + last_write_checksum: sha1:3e5b4d837d4c9ec3b24a0175cdeee28640a68362 + docs/models/shared/qdrantclouddestinationconnectorconfig.md: + last_write_checksum: sha1:fb753abb154e30a1641412ca5c79f6283f893e52 + docs/models/shared/qdrantclouddestinationconnectorconfiginput.md: + last_write_checksum: sha1:1e4585d5e49818eb3bd96ce49fdc7352405dd8d0 + docs/models/shared/redisdestinationconnectorconfig.md: + last_write_checksum: sha1:d49bdc8857927c6b94781257e20d1dbb4f931543 + docs/models/shared/redisdestinationconnectorconfiginput.md: + last_write_checksum: sha1:a29bba2080fc820f280e8cbcacdfccd396f463ef + docs/models/shared/s3destinationconnectorconfig.md: + last_write_checksum: sha1:64f2b76d2e2f13365ca18c962682334643559840 + docs/models/shared/s3destinationconnectorconfiginput.md: + last_write_checksum: sha1:3e7f89ac1bd6c9b41e3e3d72d907e9bdc60ed7a3 
+ docs/models/shared/s3sourceconnectorconfig.md: + last_write_checksum: sha1:5f3f4543408f6d3e827a4941303bec80a81aa4c8 + docs/models/shared/s3sourceconnectorconfiginput.md: + last_write_checksum: sha1:136160436053ae2256f4ec6bdc1b7d90eef74f6f + docs/models/shared/salesforcesourceconnectorconfig.md: + last_write_checksum: sha1:8498d86acc3cc77c30b5a4c7ba1f2b93adcedf9d + docs/models/shared/salesforcesourceconnectorconfiginput.md: + last_write_checksum: sha1:83ef16a5d62c27f56fc4d244c3551af4611b91c0 + docs/models/shared/schedule.md: + last_write_checksum: sha1:bed88c3805344ddaa0164471b736953cd7def29f + docs/models/shared/secretreference.md: + last_write_checksum: sha1:9ab151dee53e96882bfdd0feefb87e4edd9bc0c2 + docs/models/shared/security.md: + last_write_checksum: sha1:19aaecc4930262a902f34479d6916065d2d606b8 + docs/models/shared/serviceaccountkey.md: + last_write_checksum: sha1:f91f5c52465ef2b08b17d116176024954c686121 + docs/models/shared/sharepointsourceconnectorconfig.md: + last_write_checksum: sha1:c534a730bca4073c38a38bbd683b82239ccf6c5c + docs/models/shared/sharepointsourceconnectorconfiginput.md: + last_write_checksum: sha1:050c4dbc29588572c67ec76b20798f38cbfe8107 + docs/models/shared/snowflakedestinationconnectorconfig.md: + last_write_checksum: sha1:b215353f8b03f0f0d5ca7be0e328a981fe87c25b + docs/models/shared/snowflakedestinationconnectorconfiginput.md: + last_write_checksum: sha1:3fe8332ff281a6ba8714750c2a160754edf347e2 + docs/models/shared/snowflakesourceconnectorconfig.md: + last_write_checksum: sha1:ad263dc6b560e84839a56321423e8f9f7881b299 + docs/models/shared/snowflakesourceconnectorconfiginput.md: + last_write_checksum: sha1:7361de32748c0e3982799c32de47afde7faee86d + docs/models/shared/sortdirection.md: + last_write_checksum: sha1:18885c3283c4e2214739bd2f87b879e13defd9bb + docs/models/shared/sourceconnectorinformation.md: + last_write_checksum: sha1:635fbe720ba6de5d875a2ae26a18aba49c118353 + docs/models/shared/sourceconnectorinformationconfig.md: + last_write_checksum: sha1:0263a9f5b6dbc4ef0e07a15239e236be66b5750c + docs/models/shared/sourceconnectortype.md: + last_write_checksum: sha1:1ed56b342903fec2a5c4e6fa8ee843810a90d149 + docs/models/shared/strategy.md: + last_write_checksum: sha1:07155e281d43ece3d6970b315ca1761ea18cce9f + docs/models/shared/templatedetail.md: + last_write_checksum: sha1:92202714571ec3ef30439dc64cbc96c8430d2891 + docs/models/shared/templatelistitem.md: + last_write_checksum: sha1:4faa1971a0ee68512a9c8be9ba30088726ba92ca + docs/models/shared/templatenode.md: + last_write_checksum: sha1:0900ecbd8ebdb0261dc0078d5c4a7423c732aae6 + docs/models/shared/updatedestinationconnector.md: + last_write_checksum: sha1:37c58c6b362551502eabf5eca98e99575d642677 + docs/models/shared/updatedestinationconnectorconfig.md: + last_write_checksum: sha1:9ec4cfd55e52c4f03e99a7154172b22e26af83ba + docs/models/shared/updatesourceconnector.md: + last_write_checksum: sha1:c13569bb40dcaee8f13e077eb5b53d95098ea34d + docs/models/shared/updatesourceconnectorconfig.md: + last_write_checksum: sha1:c110194e464b7c3c530dc0d768c8747f1474755b + docs/models/shared/updateworkflow.md: + last_write_checksum: sha1:94e95e0b8746baf81b33fab234ee5d320a3fae2e + docs/models/shared/updateworkflowschedule.md: + last_write_checksum: sha1:6f8ffbb4ef7891906d94ac6d2b18b693f52717f0 + docs/models/shared/validationerror.md: + last_write_checksum: sha1:5cc3df9eadb5a867d3a0c72debecd66cf30dc339 + docs/models/shared/vlmmodelprovider.md: + last_write_checksum: sha1:3606b2069afaa537150fd1eec58980e438b9aad4 + 
docs/models/shared/weaviatedestinationconnectorconfig.md: + last_write_checksum: sha1:ab245e5ad184589ec8e67e8d235c0537673a6385 + docs/models/shared/weaviatedestinationconnectorconfiginput.md: + last_write_checksum: sha1:3ef97686843346bf06dc44d14ddaa938e1db84ea + docs/models/shared/workflowinformation.md: + last_write_checksum: sha1:952fc7a8fd9c2a25e69b03bd3514e2a09043e379 + docs/models/shared/workflowjobtype.md: + last_write_checksum: sha1:fae1144db497ed8907f60e85dfd11f662260c136 + docs/models/shared/workflownode.md: + last_write_checksum: sha1:6f7b9ed5d34ce2fd4065e5051e71e509009504b3 + docs/models/shared/workflowschedule.md: + last_write_checksum: sha1:9f70be7c345c609163ed2eff990a6195e74a7bcb + docs/models/shared/workflowstate.md: + last_write_checksum: sha1:a2d728f2c4b333a1f4a80a368a64063886d10067 + docs/models/shared/workflowtype.md: + last_write_checksum: sha1:f21c2a3c61be43f9bff16518596fc87a07a6f67c + docs/models/shared/zendesksourceconnectorconfig.md: + last_write_checksum: sha1:154c44de360321d01d7880cb9cca55ec7a3947bf + docs/models/shared/zendesksourceconnectorconfiginput.md: + last_write_checksum: sha1:f3a5aeb7282bd636d66f37f21cd163d3c4a578c0 + docs/models/utils/retryconfig.md: + last_write_checksum: sha1:562c0f21e308ad10c27f85f75704c15592c6929d + docs/sdks/destinations/README.md: + last_write_checksum: sha1:f1d323f36e6832bfda94da124edd1287ed212ac4 + docs/sdks/general/README.md: + last_write_checksum: sha1:b773733847fc4c876b937db85c23828a170ef959 + docs/sdks/jobs/README.md: + last_write_checksum: sha1:7330be63152f4020d75de2f1ad28f665331bb733 + docs/sdks/sources/README.md: + last_write_checksum: sha1:90cbfaee4c5129854425b6fe23a2592bc0719cc8 + docs/sdks/templates/README.md: + last_write_checksum: sha1:14bad5acd71a4029431503ec8ec3755cea410fae + docs/sdks/workflows/README.md: + last_write_checksum: sha1:1d20be0e51ef1ec5eb3752a58343d88606a1c031 + poetry.toml: + last_write_checksum: sha1:2242305e29dc6921bdf5b200aea5d4bf67830230 + py.typed: + last_write_checksum: sha1:8efc425ffe830805ffcc0f3055871bdcdc542c60 + pylintrc: + last_write_checksum: sha1:db2aebd83e553dd59d3965e79104a3fb780c403a + pyproject.toml: + last_write_checksum: sha1:c8c5dd42f3990f787b8db57c625080e7b24ff1c7 + scripts/prepare_readme.py: + last_write_checksum: sha1:ccdba5069fbb7997500b6ba22366e004394cdca2 + scripts/publish.sh: + last_write_checksum: sha1:b31bafc19c15ab5ea925fdf8d5d4adce2b115a63 + src/unstructured_client/__init__.py: + last_write_checksum: sha1:da077c0bdfcef64a4a5aea91a17292f72fa2b088 + src/unstructured_client/_hooks/__init__.py: + last_write_checksum: sha1:e3111289afd28ad557c21d9e2f918caabfb7037d + src/unstructured_client/_hooks/sdkhooks.py: + last_write_checksum: sha1:526e9428e70784e751eacea2b022a0302a7a17f4 + src/unstructured_client/_hooks/types.py: + last_write_checksum: sha1:ac25bc33fba97274c6554d0e735b4c90c2650a88 + src/unstructured_client/_version.py: + last_write_checksum: sha1:04347a4921c6bf252eba5cb11c1ce385548a1c03 + src/unstructured_client/basesdk.py: + last_write_checksum: sha1:c630e3511b954676ab8b477fd7b11c4ce8f443a7 + src/unstructured_client/destinations.py: + last_write_checksum: sha1:918da90d2ac2ec108be3dfd18ec123a0eca88c9f + src/unstructured_client/httpclient.py: + last_write_checksum: sha1:5e55338d6ee9f01ab648cad4380201a8a3da7dd7 + src/unstructured_client/jobs.py: + last_write_checksum: sha1:d8200021ecd41c623ea2bccfcf284d97d3350936 + src/unstructured_client/models/__init__.py: + last_write_checksum: sha1:fd931d5c2d58b5f9189cc897e038d6d78e362dab + 
src/unstructured_client/models/errors/__init__.py: + last_write_checksum: sha1:02c8f92c6032c3dfdf6d02bad73b27678aaf20ef + src/unstructured_client/models/errors/httpvalidationerror.py: + last_write_checksum: sha1:93a7813ccdbeec8a3f097d6d2ee63df07366a683 + src/unstructured_client/models/errors/no_response_error.py: + last_write_checksum: sha1:7f326424a7d5ae1bcd5c89a0d6b3dbda9138942f + src/unstructured_client/models/errors/responsevalidationerror.py: + last_write_checksum: sha1:f08e0ff98d80cae65bbac1437d318f76b6affe4c + src/unstructured_client/models/errors/sdkerror.py: + last_write_checksum: sha1:35dbdbf7704b5a1e857e55c29c35cfe030555252 + src/unstructured_client/models/errors/servererror.py: + last_write_checksum: sha1:35ee229ca11f9300f560931b1003ace97d96a719 + src/unstructured_client/models/errors/unstructuredclienterror.py: + last_write_checksum: sha1:5204738e0a98af5f878cf1a4f743b470312d5fbb + src/unstructured_client/models/operations/__init__.py: + last_write_checksum: sha1:5b32ee1e14784aeb16f29dd0f65095f772e09428 + src/unstructured_client/models/operations/cancel_job.py: + last_write_checksum: sha1:fdff92949ca59c2445489f0e75e9e347a1d2d030 + src/unstructured_client/models/operations/create_connection_check_destinations.py: + last_write_checksum: sha1:f5c1a792bc7977a58c2ce3e9325e0b91b821eebb + src/unstructured_client/models/operations/create_connection_check_sources.py: + last_write_checksum: sha1:e81098b56db93e9ff3b557ba6f34224f4c9f6566 + src/unstructured_client/models/operations/create_destination.py: + last_write_checksum: sha1:f6870661f86ccf0342eaa018ef5e5679794e8e1d + src/unstructured_client/models/operations/create_job.py: + last_write_checksum: sha1:09ae7fe3eeaa58327528855e2eb0b315c8756f9d + src/unstructured_client/models/operations/create_source.py: + last_write_checksum: sha1:b774183f8ec9f4796dfbe73adfdf0473027c4db4 + src/unstructured_client/models/operations/create_workflow.py: + last_write_checksum: sha1:64418d73d3dd27ed93b575e2ede742c0bba574ec + src/unstructured_client/models/operations/delete_destination.py: + last_write_checksum: sha1:7832e4aa99fbc1ec98dfc1e650ec0ee1fc2f9d4d + src/unstructured_client/models/operations/delete_source.py: + last_write_checksum: sha1:a2b1be7accf9ae35f5c498a9279793b509fcb740 + src/unstructured_client/models/operations/delete_workflow.py: + last_write_checksum: sha1:951c9c7de8b0aa374aa31084f33a3590785fa1cf + src/unstructured_client/models/operations/download_job_output.py: + last_write_checksum: sha1:8c286fe548d11e81b54ef68a1cbd9d529c51168e + src/unstructured_client/models/operations/get_connection_check_destinations.py: + last_write_checksum: sha1:95509c260c51390ee58f7997091c521d0767044d + src/unstructured_client/models/operations/get_connection_check_sources.py: + last_write_checksum: sha1:8c4a260e608dbea0546a200dadd8f44d585cfe59 + src/unstructured_client/models/operations/get_destination.py: + last_write_checksum: sha1:35303da1961529967c0593c82e04de58b8b1aa7b + src/unstructured_client/models/operations/get_job.py: + last_write_checksum: sha1:087899726039102279a068a023c4e6bb3a076ee0 + src/unstructured_client/models/operations/get_job_details.py: + last_write_checksum: sha1:6fd7de956121f6637618fbe1d67710ba04c551c3 + src/unstructured_client/models/operations/get_job_failed_files.py: + last_write_checksum: sha1:f5e4bdb03af4303140f7441e7b6d75705b5475f9 + src/unstructured_client/models/operations/get_source.py: + last_write_checksum: sha1:199bbc799cdc9adfd78a5cd72defa66360747252 + src/unstructured_client/models/operations/get_template.py: + 
last_write_checksum: sha1:c191b7baae646b12425b84a9ae6e6e9423c2957f + src/unstructured_client/models/operations/get_workflow.py: + last_write_checksum: sha1:69f91249ed4e8aefe358911b3e38ac1924448376 + src/unstructured_client/models/operations/list_destinations.py: + last_write_checksum: sha1:24d8c36d71132b8828e1d895b22ed92a8c0fd6a4 + src/unstructured_client/models/operations/list_jobs.py: + last_write_checksum: sha1:d49d4046bddc33647aeb6efc9d07b736ca05b7e1 + src/unstructured_client/models/operations/list_sources.py: + last_write_checksum: sha1:b3fc2fbe9911c73074f2bc4de90e62809807c23d + src/unstructured_client/models/operations/list_templates.py: + last_write_checksum: sha1:705ddddc3fb6dce6653c84775745540f60ee81ee + src/unstructured_client/models/operations/list_workflows.py: + last_write_checksum: sha1:fcc39a140094e3e1b05a4a7872b8d06455b4bd12 + src/unstructured_client/models/operations/partition.py: + last_write_checksum: sha1:7eef65659f7c295af8500c50807a0b1902212945 + src/unstructured_client/models/operations/run_workflow.py: + last_write_checksum: sha1:af3fc637859d771f243d2f4c8098587312b3aebe + src/unstructured_client/models/operations/update_destination.py: + last_write_checksum: sha1:6e3dfce61ad34446f17299dc04853efaf6cea653 + src/unstructured_client/models/operations/update_source.py: + last_write_checksum: sha1:5ad731a1ccc08db5dc444a79545f3d59b6989429 + src/unstructured_client/models/operations/update_workflow.py: + last_write_checksum: sha1:2987bcb5781b5002aa307e5255c7072f92b026a6 + src/unstructured_client/models/shared/__init__.py: + last_write_checksum: sha1:84789d683d1f00f60dcf41e302d0812b7a72e2c7 + src/unstructured_client/models/shared/astradbconnectorconfig.py: + last_write_checksum: sha1:c9ada225d014655e2800c4dac8dcb95282a12cbc + src/unstructured_client/models/shared/astradbconnectorconfiginput.py: + last_write_checksum: sha1:452a4e0f89079aa1520e22c5920cc7927efb0b46 + src/unstructured_client/models/shared/azureaisearchconnectorconfig.py: + last_write_checksum: sha1:b04bb92d9803b10f9dacb9b489eb1da2d8625829 + src/unstructured_client/models/shared/azureaisearchconnectorconfiginput.py: + last_write_checksum: sha1:9afce8ddf0ed5337f7b3736b83f358bd552ffea0 + src/unstructured_client/models/shared/azuredestinationconnectorconfig.py: + last_write_checksum: sha1:4ddfddae0ce96a11c48e7969046b02295b7f14b2 + src/unstructured_client/models/shared/azuredestinationconnectorconfiginput.py: + last_write_checksum: sha1:099467bc2c3d8d8382270c7daa1cc9c4d1efa179 + src/unstructured_client/models/shared/azuresourceconnectorconfig.py: + last_write_checksum: sha1:80aedf8737ed5f3fd921bbd0f928687c6405920a + src/unstructured_client/models/shared/azuresourceconnectorconfiginput.py: + last_write_checksum: sha1:6d0ed886dcb08fc2a5260e84c02ba4b284801fe4 + src/unstructured_client/models/shared/body_create_job.py: + last_write_checksum: sha1:4e6c48f7dc1571c15daed682392aeab8f4e37211 + src/unstructured_client/models/shared/body_run_workflow.py: + last_write_checksum: sha1:d3e8d5202144158ef9f9864aa8f60e1615832ffd + src/unstructured_client/models/shared/boxsourceconnectorconfig.py: + last_write_checksum: sha1:1d0f39cf77cb04dd8945d6715b7a64e8927b7f3d + src/unstructured_client/models/shared/boxsourceconnectorconfiginput.py: + last_write_checksum: sha1:4ed28081e8a1bcf484d1efdbba96726e505ce916 + src/unstructured_client/models/shared/confluencesourceconnectorconfig.py: + last_write_checksum: sha1:645198dcdb1ca26202519f88c6c21a91e3f2942e + src/unstructured_client/models/shared/confluencesourceconnectorconfiginput.py: + 
last_write_checksum: sha1:f8165c831c1300a0ebe5a7fa563ce1a77e2cd904 + src/unstructured_client/models/shared/connectioncheckstatus.py: + last_write_checksum: sha1:ec33f9af1d8679943a3988f3ec2f4b98a5d2c602 + src/unstructured_client/models/shared/couchbasedestinationconnectorconfig.py: + last_write_checksum: sha1:a5292001d8ef71f6baf27c1022ee4ce1b3c8c6c4 + src/unstructured_client/models/shared/couchbasedestinationconnectorconfiginput.py: + last_write_checksum: sha1:276e270b32835cf1bc3e4da7d70ada24c6d201ac + src/unstructured_client/models/shared/couchbasesourceconnectorconfig.py: + last_write_checksum: sha1:19f21ab3e207954c6970137c84fd106d6c01a3fc + src/unstructured_client/models/shared/couchbasesourceconnectorconfiginput.py: + last_write_checksum: sha1:87a98b17a3b14b9defe1b018e4c98a1d9aeea73b + src/unstructured_client/models/shared/createdestinationconnector.py: + last_write_checksum: sha1:1c60a323f80a24edeec45c4cbc15c85432cc3c6c + src/unstructured_client/models/shared/createsourceconnector.py: + last_write_checksum: sha1:a2436029be0bfe9533c607b1ed3f26d310d0e522 + src/unstructured_client/models/shared/createworkflow.py: + last_write_checksum: sha1:9201d99f3a9ee2d561b68b68d2af487ee9e21f9d + src/unstructured_client/models/shared/crontabentry.py: + last_write_checksum: sha1:feae141e003f5248883a7fc3563ad7af44b53b71 + src/unstructured_client/models/shared/dagnodeconnectioncheck.py: + last_write_checksum: sha1:0cd1b38d1f51f121f18ec769c7d4a9cd2d37cc97 + src/unstructured_client/models/shared/databricksvdtdestinationconnectorconfig.py: + last_write_checksum: sha1:85121248ecc45fe662093a00298b5ab4539100e8 + src/unstructured_client/models/shared/databricksvdtdestinationconnectorconfiginput.py: + last_write_checksum: sha1:a99f3d542753b6bf1700d6c94fbcbcf5554d3b32 + src/unstructured_client/models/shared/databricksvolumesconnectorconfig.py: + last_write_checksum: sha1:974fa6e16d1f4099630a274e9ba6619cd856cb68 + src/unstructured_client/models/shared/databricksvolumesconnectorconfiginput.py: + last_write_checksum: sha1:a458440318bbb57ca077b951266703a4ddaf64ef + src/unstructured_client/models/shared/deltatableconnectorconfig.py: + last_write_checksum: sha1:1fdbc330f0d0ce905d7822eccdfe68142cbf6b74 + src/unstructured_client/models/shared/deltatableconnectorconfiginput.py: + last_write_checksum: sha1:42ebe9712bc0f083b890cd7ff78208d9e838dfb8 + src/unstructured_client/models/shared/destinationconnectorinformation.py: + last_write_checksum: sha1:36455d4144c5de6a6ca78078006679ced5cc0a6e + src/unstructured_client/models/shared/destinationconnectortype.py: + last_write_checksum: sha1:00ec603c427b636ae82d8a688f5383534e12ab69 + src/unstructured_client/models/shared/dropboxsourceconnectorconfig.py: + last_write_checksum: sha1:975b665d4a1280a04558b963d7564dafaa04afc1 + src/unstructured_client/models/shared/dropboxsourceconnectorconfiginput.py: + last_write_checksum: sha1:216b38301a9ceb44a52486ae3f19f847c088337b + src/unstructured_client/models/shared/elasticsearchconnectorconfig.py: + last_write_checksum: sha1:2b6736c9464d0fbd15d22ad22d614776231b37ba + src/unstructured_client/models/shared/elasticsearchconnectorconfiginput.py: + last_write_checksum: sha1:72f06107b76ca6177cd7beefb7f45286461d0d31 + src/unstructured_client/models/shared/encryptiontype.py: + last_write_checksum: sha1:224acc1fccb4bc7a61cece851990685caeee67a0 + src/unstructured_client/models/shared/failedfile.py: + last_write_checksum: sha1:e090c5b933f8e279910e5faaa2bf50dd3eb84f13 + src/unstructured_client/models/shared/gcsdestinationconnectorconfig.py: + 
last_write_checksum: sha1:bcb55093e2c7dbf26dcc5b51462a7f837ef0dbbf + src/unstructured_client/models/shared/gcsdestinationconnectorconfiginput.py: + last_write_checksum: sha1:c8cf40b27ae62f62cf75f112ab8f7ecde4ea1a74 + src/unstructured_client/models/shared/gcssourceconnectorconfig.py: + last_write_checksum: sha1:8cd827af3c05ac4af452e8bd37c6220f4a73b050 + src/unstructured_client/models/shared/gcssourceconnectorconfiginput.py: + last_write_checksum: sha1:9d9a968cc78f3378c5aeac2423d98b3a0eb01760 + src/unstructured_client/models/shared/googledrivesourceconnectorconfig.py: + last_write_checksum: sha1:94e22103b01b68e9b29e361e9543bd44d02873e4 + src/unstructured_client/models/shared/googledrivesourceconnectorconfiginput.py: + last_write_checksum: sha1:8cea91d87af52ea443a4614f3e54c5a244bc2ed2 + src/unstructured_client/models/shared/ibmwatsonxs3destinationconnectorconfig.py: + last_write_checksum: sha1:2dcf63d86b10d05c2b15c94e8c39dde93517b5c9 + src/unstructured_client/models/shared/ibmwatsonxs3destinationconnectorconfiginput.py: + last_write_checksum: sha1:0a9bc9d32bf1f6201ebffbc68a84d0e3323de9cd + src/unstructured_client/models/shared/jirasourceconnectorconfig.py: + last_write_checksum: sha1:5a475b7cb0ebb65a1ff921a3a5e09c77c5042258 + src/unstructured_client/models/shared/jirasourceconnectorconfiginput.py: + last_write_checksum: sha1:796325fd5416e5a6e40851c291ec33358aa762a9 + src/unstructured_client/models/shared/jobdetails.py: + last_write_checksum: sha1:1f9cec3b47347535ff23a200600bfa83572d9fd8 + src/unstructured_client/models/shared/jobfailedfiles.py: + last_write_checksum: sha1:b30fcb1173542a248bdbe676c82c24673ddb1d76 + src/unstructured_client/models/shared/jobinformation.py: + last_write_checksum: sha1:5a51e8012bc008d2791b69dee911faacea6d7764 + src/unstructured_client/models/shared/jobnodedetails.py: + last_write_checksum: sha1:de6ee17639eeff181eff2fb8a481401a2c250062 + src/unstructured_client/models/shared/jobprocessingstatus.py: + last_write_checksum: sha1:fcdf2f6a9ce596ed1a5b966890544bb2e40da68f + src/unstructured_client/models/shared/jobstatus.py: + last_write_checksum: sha1:ff4c2af6e09b05d25cd753e70f143fcb13d33f99 + src/unstructured_client/models/shared/kafkaclouddestinationconnectorconfig.py: + last_write_checksum: sha1:bc303ce1eb44468f123b3ac3caaf92820f41398f + src/unstructured_client/models/shared/kafkaclouddestinationconnectorconfiginput.py: + last_write_checksum: sha1:555620cbd0f512ce484a289f095f357ab0963227 + src/unstructured_client/models/shared/kafkacloudsourceconnectorconfig.py: + last_write_checksum: sha1:b6164b716d36839df0b5c9d2f0af75fda82f9664 + src/unstructured_client/models/shared/kafkacloudsourceconnectorconfiginput.py: + last_write_checksum: sha1:aa7692f1f7e3eb6744d34f81a702659803499a6e + src/unstructured_client/models/shared/milvusdestinationconnectorconfig.py: + last_write_checksum: sha1:464a01f830a6d57dd1b49550e0ded2a3bebe43eb + src/unstructured_client/models/shared/milvusdestinationconnectorconfiginput.py: + last_write_checksum: sha1:fc8a66fbee8f04c45a4ecb6ecc1c5f1e33e538d2 + src/unstructured_client/models/shared/mongodbconnectorconfig.py: + last_write_checksum: sha1:94bfa797fa7773f62fdb93748e81373313fdb555 + src/unstructured_client/models/shared/mongodbconnectorconfiginput.py: + last_write_checksum: sha1:2ca575b07c184409c4bd9d0c05f9c5d39e14b5c5 + src/unstructured_client/models/shared/neo4jdestinationconnectorconfig.py: + last_write_checksum: sha1:5e7caa96b7df49093c9ee634ac02fda3f1d90450 + src/unstructured_client/models/shared/neo4jdestinationconnectorconfiginput.py: + 
last_write_checksum: sha1:77d08a4a5215c377c8b70eb7e63b244c8798af7f + src/unstructured_client/models/shared/nodefilemetadata.py: + last_write_checksum: sha1:df211de02dd413ca8f64d8298efcdd1b867ad480 + src/unstructured_client/models/shared/onedrivedestinationconnectorconfig.py: + last_write_checksum: sha1:25af33d491d0bb604255c8e3fb9fcb6bf0ddb486 + src/unstructured_client/models/shared/onedrivedestinationconnectorconfiginput.py: + last_write_checksum: sha1:c697878a9f8fa3a2a7eb59df0ba13c413c382ed6 + src/unstructured_client/models/shared/onedrivesourceconnectorconfig.py: + last_write_checksum: sha1:af9f70cc946dd96d58b1164344b0f5682490cce2 + src/unstructured_client/models/shared/onedrivesourceconnectorconfiginput.py: + last_write_checksum: sha1:50f983e8f78b96d8523ea0c0df7e65f560cb6ea0 + src/unstructured_client/models/shared/outlooksourceconnectorconfig.py: + last_write_checksum: sha1:e414684ad984614ea53f18d3525c35ac023e3b62 + src/unstructured_client/models/shared/outlooksourceconnectorconfiginput.py: + last_write_checksum: sha1:7b836a607128f7afe99b2f9ee6a56e8069bae81a + src/unstructured_client/models/shared/partition_parameters.py: + last_write_checksum: sha1:03c00b398edaae18521b3e215cb723a51bf5c940 + src/unstructured_client/models/shared/pineconedestinationconnectorconfig.py: + last_write_checksum: sha1:a350aef0d94ed7a118d4acb8de1687e6abb54631 + src/unstructured_client/models/shared/pineconedestinationconnectorconfiginput.py: + last_write_checksum: sha1:a7a6755566625df46201c3672c5e34cf7efd92d3 + src/unstructured_client/models/shared/postgresdestinationconnectorconfig.py: + last_write_checksum: sha1:59191acdd3a316d709c6747c4fa5a68757a9907a + src/unstructured_client/models/shared/postgresdestinationconnectorconfiginput.py: + last_write_checksum: sha1:c959eee670ceff082e8bec05264efd4a89e1c5d2 + src/unstructured_client/models/shared/postgressourceconnectorconfig.py: + last_write_checksum: sha1:d4cec9d147bee01ef97c2328acd4764a3fba54ef + src/unstructured_client/models/shared/postgressourceconnectorconfiginput.py: + last_write_checksum: sha1:73084827edce35a68b9071ca989350bb416d20be + src/unstructured_client/models/shared/qdrantclouddestinationconnectorconfig.py: + last_write_checksum: sha1:6069ea42140aac20a1d4a696dcde414964b17753 + src/unstructured_client/models/shared/qdrantclouddestinationconnectorconfiginput.py: + last_write_checksum: sha1:4c16445610cf6314e8af45d47dd872933762461c + src/unstructured_client/models/shared/redisdestinationconnectorconfig.py: + last_write_checksum: sha1:6aa584d9564ee1ba16aaab352a34b4da2124c623 + src/unstructured_client/models/shared/redisdestinationconnectorconfiginput.py: + last_write_checksum: sha1:7defebd3c1c1ecd2e633653e3bf09d66f41c65b0 + src/unstructured_client/models/shared/s3destinationconnectorconfig.py: + last_write_checksum: sha1:60a757df68981b64f07c5d6420bab65d50fc4ac7 + src/unstructured_client/models/shared/s3destinationconnectorconfiginput.py: + last_write_checksum: sha1:044395f271a259844c0da10398cf826fc6797367 + src/unstructured_client/models/shared/s3sourceconnectorconfig.py: + last_write_checksum: sha1:5780fefd557dd44cf4eb097413c61bdeeb2f0feb + src/unstructured_client/models/shared/s3sourceconnectorconfiginput.py: + last_write_checksum: sha1:2abf0e536ae85a98f2912419d9826403dcad57a7 + src/unstructured_client/models/shared/salesforcesourceconnectorconfig.py: + last_write_checksum: sha1:d3ed1a04b4965511e3242651ec2475eee5b2435b + src/unstructured_client/models/shared/salesforcesourceconnectorconfiginput.py: + last_write_checksum: 
sha1:61fe5539cab69ef33c2df0592ab0a90f96ae3e62 + src/unstructured_client/models/shared/secretreference.py: + last_write_checksum: sha1:7412d990eeee22a89e6a506527eceecea7437f9e + src/unstructured_client/models/shared/security.py: + last_write_checksum: sha1:07ee315df44c21dc5950347b2a1300f23aea53bc + src/unstructured_client/models/shared/sharepointsourceconnectorconfig.py: + last_write_checksum: sha1:2361dfd298822d6171e3e92e68d1026b1627e488 + src/unstructured_client/models/shared/sharepointsourceconnectorconfiginput.py: + last_write_checksum: sha1:e433ac3c21f8d74b0101f84edd8ccd65dbfd3371 + src/unstructured_client/models/shared/snowflakedestinationconnectorconfig.py: + last_write_checksum: sha1:5f73e0ec0e712a7695dd063b73e73c197cf7e8ad + src/unstructured_client/models/shared/snowflakedestinationconnectorconfiginput.py: + last_write_checksum: sha1:ed92796870092cae8b3a4d991dce8830836f806c + src/unstructured_client/models/shared/snowflakesourceconnectorconfig.py: + last_write_checksum: sha1:4d7fe8b816d43065fce3249149d2613e78f72e93 + src/unstructured_client/models/shared/snowflakesourceconnectorconfiginput.py: + last_write_checksum: sha1:0267848ad53dcdc2f65443d9c569c30683792f25 + src/unstructured_client/models/shared/sortdirection.py: + last_write_checksum: sha1:316a46717625825ea3a622089df62f1c02be1482 + src/unstructured_client/models/shared/sourceconnectorinformation.py: + last_write_checksum: sha1:699d383ccaee500ea027e4bd277af43e614b9b24 + src/unstructured_client/models/shared/sourceconnectortype.py: + last_write_checksum: sha1:7c7ae7a8453ad266f3da93be17eddc48be119807 + src/unstructured_client/models/shared/templatedetail.py: + last_write_checksum: sha1:05292e12a45407c2cb6c500386f2d28298e2129f + src/unstructured_client/models/shared/templatelistitem.py: + last_write_checksum: sha1:8aeb58b48884855d75b0eab458cd20298816f17b + src/unstructured_client/models/shared/templatenode.py: + last_write_checksum: sha1:e4eb4fda54c7f177addc32f786a59cdaf886ea1d + src/unstructured_client/models/shared/updatedestinationconnector.py: + last_write_checksum: sha1:ac3328aa63eabc4d83f993fc7532cf99f141dff8 + src/unstructured_client/models/shared/updatesourceconnector.py: + last_write_checksum: sha1:25577dad79e9eca6b8930bf390eb8c5ab94fecc6 + src/unstructured_client/models/shared/updateworkflow.py: + last_write_checksum: sha1:f14b03dfc4d74915d60369f07f17dd6805820cc0 + src/unstructured_client/models/shared/validationerror.py: + last_write_checksum: sha1:d9824b38a8e37ec6b290ee08643569713ad3a093 + src/unstructured_client/models/shared/weaviatedestinationconnectorconfig.py: + last_write_checksum: sha1:d30dbd2c90e6092714067c4fbaf7c99bd90e0a24 + src/unstructured_client/models/shared/weaviatedestinationconnectorconfiginput.py: + last_write_checksum: sha1:a11f1165b1e0f74057fbd141f7cc81c438cfd849 + src/unstructured_client/models/shared/workflowinformation.py: + last_write_checksum: sha1:2f4b751b1982f701cfff0e1e1fef9459fe759c82 + src/unstructured_client/models/shared/workflowjobtype.py: + last_write_checksum: sha1:b517456e9467a16ea8704e72da18b32971d8dc1f + src/unstructured_client/models/shared/workflownode.py: + last_write_checksum: sha1:6638c8382528b58ae3557892859ce266affea592 + src/unstructured_client/models/shared/workflowschedule.py: + last_write_checksum: sha1:3cb03c67f1cdcd634dccc6d0f4d852d19e436f04 + src/unstructured_client/models/shared/workflowstate.py: + last_write_checksum: sha1:1de3dc7980ca8b5d64f6cb40673ceff9f0d69e1b + src/unstructured_client/models/shared/workflowtype.py: + last_write_checksum: 
sha1:6db14bdb8108eac8bd050f0e4dbb252d312f09b7 + src/unstructured_client/models/shared/zendesksourceconnectorconfig.py: + last_write_checksum: sha1:e476e4bfae1aec4eb735944fc53d5271ac6e9a7f + src/unstructured_client/models/shared/zendesksourceconnectorconfiginput.py: + last_write_checksum: sha1:53d57a96ddd09e8af9ef3747c6d948ec7ef63651 + src/unstructured_client/py.typed: + last_write_checksum: sha1:8efc425ffe830805ffcc0f3055871bdcdc542c60 + src/unstructured_client/sdk.py: + last_write_checksum: sha1:beca014799697f82eb44d63e66ca251e417d5fce + src/unstructured_client/sdkconfiguration.py: + last_write_checksum: sha1:30ec09b03bc31e647e1ca4014979027d47616085 + src/unstructured_client/sources.py: + last_write_checksum: sha1:418f61c8c13359ebfc0e8bf09cab81f65f583dc2 + src/unstructured_client/templates.py: + last_write_checksum: sha1:26f52e5b28cf8b0b9a66ddc945d975a6fe0ea06c + src/unstructured_client/types/__init__.py: + last_write_checksum: sha1:140ebdd01a46f92ffc710c52c958c4eba3cf68ed + src/unstructured_client/types/basemodel.py: + last_write_checksum: sha1:615d0b364fa924b0fef719958df34596cc7c1ae2 + src/unstructured_client/utils/__init__.py: + last_write_checksum: sha1:81e0385b93362e0f3f6911b65bd4cc601ebc11e1 + src/unstructured_client/utils/annotations.py: + last_write_checksum: sha1:a4824ad65f730303e4e1e3ec1febf87b4eb46dbc + src/unstructured_client/utils/datetimes.py: + last_write_checksum: sha1:c721e4123000e7dc61ec52b28a739439d9e17341 + src/unstructured_client/utils/enums.py: + last_write_checksum: sha1:786ba597f79dca6fbc0d87c591752bb8d775ecb7 + src/unstructured_client/utils/eventstreaming.py: + last_write_checksum: sha1:bababae5d54b7efc360db701daa49e18a92c2f3b + src/unstructured_client/utils/forms.py: + last_write_checksum: sha1:15fa7e9ab1611e062a9984cf06cb20969713d295 + src/unstructured_client/utils/headers.py: + last_write_checksum: sha1:7c6df233ee006332b566a8afa9ce9a245941d935 + src/unstructured_client/utils/logger.py: + last_write_checksum: sha1:f3fdb154a3f09b8cc43d74c7e9c02f899f8086e4 + src/unstructured_client/utils/metadata.py: + last_write_checksum: sha1:c6a560bd0c63ab158582f34dadb69433ea73b3d4 + src/unstructured_client/utils/queryparams.py: + last_write_checksum: sha1:b94c3f314fd3da0d1d215afc2731f48748e2aa59 + src/unstructured_client/utils/requestbodies.py: + last_write_checksum: sha1:e0a3a78158eba39880475d62d61be906625676b8 + src/unstructured_client/utils/retries.py: + last_write_checksum: sha1:5b97ac4f59357d70c2529975d50364c88bcad607 + src/unstructured_client/utils/security.py: + last_write_checksum: sha1:a17130ace2c0db6394f38dd941ad2b700cc755c8 + src/unstructured_client/utils/serializers.py: + last_write_checksum: sha1:a0d184ace7371a14a7d005cca7f358a03e3d4b07 + src/unstructured_client/utils/unmarshal_json_response.py: + last_write_checksum: sha1:44d94d805cd5b5d3f3a7c124dd86d383b6bd62cc + src/unstructured_client/utils/url.py: + last_write_checksum: sha1:6479961baa90432ca25626f8e40a7bbc32e73b41 + src/unstructured_client/utils/values.py: + last_write_checksum: sha1:acaa178a7c41ddd000f58cc691e4632d925b2553 + src/unstructured_client/workflows.py: + last_write_checksum: sha1:eb92bdbf1fa408dc6d71738341cd853a51a466ae diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index ca7f6466..bff48ae4 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -2,20 +2,20 @@ speakeasyVersion: 1.601.0 sources: my-source: sourceNamespace: my-source - sourceRevisionDigest: sha256:8165f715321cd34bcebb6c9bb0734a1791777229937787b8e13707d519b05a5e - sourceBlobDigest: 
sha256:b4f8d9a6b0f4245c50b5e53298343df470978417248fa3988aeeb84ec9200c93 + sourceRevisionDigest: sha256:b52b0c33c6a50addb404027e76807677297f288b2b1f22eaf9713b528ca64c9c + sourceBlobDigest: sha256:15c781166845c591afeb3eec92a8ed77f867be120294a01fd34b48538d285b9a tags: - latest - - speakeasy-sdk-regen-1754698272 - - 1.1.45 + - speakeasy-sdk-regen-1765576473 + - 1.2.21 targets: unstructured-python: source: my-source sourceNamespace: my-source - sourceRevisionDigest: sha256:8165f715321cd34bcebb6c9bb0734a1791777229937787b8e13707d519b05a5e - sourceBlobDigest: sha256:b4f8d9a6b0f4245c50b5e53298343df470978417248fa3988aeeb84ec9200c93 + sourceRevisionDigest: sha256:b52b0c33c6a50addb404027e76807677297f288b2b1f22eaf9713b528ca64c9c + sourceBlobDigest: sha256:15c781166845c591afeb3eec92a8ed77f867be120294a01fd34b48538d285b9a codeSamplesNamespace: my-source-code-samples - codeSamplesRevisionDigest: sha256:a5e11972bfb15e43b7fcb2647b3a7b3e129e9303294f9110670280971b10780a + codeSamplesRevisionDigest: sha256:43c867f2d8d6bf35ac4afe0dc12c6601d71bbf62de31b3d4c0b93c8d4778f6a9 workflow: workflowVersion: 1.0.0 speakeasyVersion: latest diff --git a/README.md b/README.md index da29a17a..17bfc6f4 100644 --- a/README.md +++ b/README.md @@ -221,7 +221,7 @@ with UnstructuredClient() as uc_client: **Inherit from [`UnstructuredClientError`](./src/unstructured_client/models/errors/unstructuredclienterror.py)**: -* [`ServerError`](./src/unstructured_client/models/errors/servererror.py): Server Error. Status code `5XX`. Applicable to 1 of 27 methods.* +* [`ServerError`](./src/unstructured_client/models/errors/servererror.py): Server Error. Status code `5XX`. Applicable to 1 of 30 methods.* * [`ResponseValidationError`](./src/unstructured_client/models/errors/responsevalidationerror.py): Type mismatch between the response data and the expected Pydantic model. Provides access to the Pydantic validation error via the `cause` attribute. @@ -443,8 +443,10 @@ from unstructured_client import UnstructuredClient with UnstructuredClient() as uc_client: - res = uc_client.workflows.run_workflow(request={ - "workflow_id": "e7054f23-ce92-4bf1-a1d7-7cf9cb14d013", + res = uc_client.jobs.create_job(request={ + "body_create_job": { + "request_data": "", + }, }) assert res.job_information is not None diff --git a/RELEASES.md b/RELEASES.md index adc5b192..e727600c 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1130,3 +1130,14 @@ Based on: - [python v0.42.3] . ### Releases - [PyPI v0.42.4] https://pypi.org/project/unstructured-client/0.42.4 - . + + +## 2025-12-12 21:54:13 +### Changes +Based on: +- OpenAPI Doc +- Speakeasy CLI 1.601.0 (2.680.0) https://github.com/speakeasy-api/speakeasy +### Generated +- [python v0.42.4] . +### Releases +- [PyPI v0.42.4] https://pypi.org/project/unstructured-client/0.42.4 - . 
\ No newline at end of file diff --git a/codeSamples.yaml b/codeSamples.yaml index f5eda8f2..b903ad06 100644 --- a/codeSamples.yaml +++ b/codeSamples.yaml @@ -14,7 +14,7 @@ actions: "x-codeSamples": - "lang": "python" "label": "create_destination" - "source": "from unstructured_client import UnstructuredClient\nfrom unstructured_client.models import shared\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.destinations.create_destination(request={\n \"create_destination_connector\": {\n \"name\": \"\",\n \"type\": shared.DestinationConnectorType.MOTHERDUCK,\n \"config\": {\n \"index_name\": \"\",\n \"api_key\": \"\",\n \"namespace\": \"\",\n \"batch_size\": 50,\n },\n },\n })\n\n assert res.destination_connector_information is not None\n\n # Handle response\n print(res.destination_connector_information)" + "source": "from unstructured_client import UnstructuredClient\nfrom unstructured_client.models import shared\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.destinations.create_destination(request={\n \"create_destination_connector\": {\n \"name\": \"\",\n \"type\": shared.DestinationConnectorType.MONGODB,\n \"config\": {\n \"index_name\": \"\",\n \"api_key\": \"\",\n \"namespace\": \"\",\n \"batch_size\": 50,\n },\n },\n })\n\n assert res.destination_connector_information is not None\n\n # Handle response\n print(res.destination_connector_information)" - target: $["paths"]["/api/v1/destinations/{destination_id}"]["delete"] update: "x-codeSamples": @@ -32,7 +32,7 @@ actions: "x-codeSamples": - "lang": "python" "label": "update_destination" - "source": "from unstructured_client import UnstructuredClient\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.destinations.update_destination(request={\n \"destination_id\": \"9726962d-9d1e-4f84-8787-c7313d183927\",\n \"update_destination_connector\": {\n \"config\": {\n \"bootstrap_servers\": \"\",\n \"port\": 9092,\n \"topic\": \"\",\n \"kafka_api_key\": \"\",\n \"secret\": \"\",\n \"batch_size\": 100,\n },\n },\n })\n\n assert res.destination_connector_information is not None\n\n # Handle response\n print(res.destination_connector_information)" + "source": "from unstructured_client import UnstructuredClient\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.destinations.update_destination(request={\n \"destination_id\": \"962d9d1e-f847-487c-a731-3d18392716fb\",\n \"update_destination_connector\": {\n \"config\": {\n \"remote_url\": \"https://lined-clamp.info\",\n \"service_account_key\": \"\",\n },\n },\n })\n\n assert res.destination_connector_information is not None\n\n # Handle response\n print(res.destination_connector_information)" - target: $["paths"]["/api/v1/destinations/{destination_id}/connection-check"]["get"] update: "x-codeSamples": @@ -51,6 +51,12 @@ actions: - "lang": "python" "label": "list_jobs" "source": "from unstructured_client import UnstructuredClient\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.jobs.list_jobs(request={})\n\n assert res.response_list_jobs is not None\n\n # Handle response\n print(res.response_list_jobs)" + - target: $["paths"]["/api/v1/jobs/"]["post"] + update: + "x-codeSamples": + - "lang": "python" + "label": "create_job" + "source": "from unstructured_client import UnstructuredClient\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.jobs.create_job(request={\n \"body_create_job\": {\n \"request_data\": \"\",\n },\n })\n\n assert res.job_information is not None\n\n # Handle response\n print(res.job_information)" 
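The regenerated `create_job` code sample above only populates `request_data`. Per the `BodyCreateJob` model added later in this patch, the body also accepts an optional `input_files` list. The sketch below is a readable, hedged expansion of that sample: `request_data` is typed as a plain string, so the JSON-encoded payload (and its `template_id` key) is an assumption rather than a schema documented here, and the per-file keys are assumed to mirror `BodyRunWorkflowInputFiles` (`file_name` / `content` / `content_type`).

```python
import json

from unstructured_client import UnstructuredClient

with UnstructuredClient() as uc_client:
    res = uc_client.jobs.create_job(request={
        "body_create_job": {
            # `request_data` is a plain string; the JSON payload and its
            # "template_id" key are assumptions, not a schema from this patch.
            "request_data": json.dumps({"template_id": "YOUR_TEMPLATE_ID"}),
            "input_files": [
                {
                    # Field names assumed to match BodyRunWorkflowInputFiles.
                    "file_name": "example.pdf",
                    "content": open("example.pdf", "rb"),
                    "content_type": "application/pdf",
                },
            ],
        },
    })

    assert res.job_information is not None

    # Handle response
    print(res.job_information)
```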
- target: $["paths"]["/api/v1/jobs/{job_id}"]["get"] update: "x-codeSamples": @@ -74,7 +80,7 @@ actions: "x-codeSamples": - "lang": "python" "label": "download_job_output" - "source": "from unstructured_client import UnstructuredClient\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.jobs.download_job_output(request={\n \"job_id\": \"06d1b7b8-8642-4793-b37e-e45d97d53bc3\",\n \"file_id\": \"\",\n \"node_id\": \"7c8f2aa4-da13-4a04-a98d-0204ea55681e\",\n })\n\n assert res.any is not None\n\n # Handle response\n print(res.any)" + "source": "from unstructured_client import UnstructuredClient\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.jobs.download_job_output(request={\n \"job_id\": \"06d1b7b8-8642-4793-b37e-e45d97d53bc3\",\n \"file_id\": \"\",\n })\n\n assert res.any is not None\n\n # Handle response\n print(res.any)" - target: $["paths"]["/api/v1/jobs/{job_id}/failed-files"]["get"] update: "x-codeSamples": @@ -123,6 +129,18 @@ actions: - "lang": "python" "label": "create_connection_check_sources" "source": "from unstructured_client import UnstructuredClient\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.sources.create_connection_check_sources(request={\n \"source_id\": \"8d49e3f2-3e6d-4973-bc61-292af66829d7\",\n })\n\n assert res.dag_node_connection_check is not None\n\n # Handle response\n print(res.dag_node_connection_check)" + - target: $["paths"]["/api/v1/templates/"]["get"] + update: + "x-codeSamples": + - "lang": "python" + "label": "list_templates" + "source": "from unstructured_client import UnstructuredClient\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.templates.list_templates(request={})\n\n assert res.response_list_templates is not None\n\n # Handle response\n print(res.response_list_templates)" + - target: $["paths"]["/api/v1/templates/{template_id}"]["get"] + update: + "x-codeSamples": + - "lang": "python" + "label": "get_template" + "source": "from unstructured_client import UnstructuredClient\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.templates.get_template(request={\n \"template_id\": \"\",\n })\n\n assert res.template_detail is not None\n\n # Handle response\n print(res.template_detail)" - target: $["paths"]["/api/v1/workflows/"]["get"] update: "x-codeSamples": @@ -164,4 +182,4 @@ actions: "x-codeSamples": - "lang": "python" "label": "partition" - "source": "from unstructured_client import UnstructuredClient\nfrom unstructured_client.models import shared\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.general.partition(request={\n \"partition_parameters\": {\n \"files\": {\n \"file_name\": \"example.file\",\n \"content\": open(\"example.file\", \"rb\"),\n },\n \"strategy\": shared.Strategy.AUTO,\n \"vlm_model_provider\": shared.VLMModelProvider.OPENAI,\n \"vlm_model\": shared.VLMModel.GPT_4O,\n \"chunking_strategy\": \"by_title\",\n \"split_pdf_page_range\": [\n 1,\n 10,\n ],\n \"split_pdf_cache_tmp_data_dir\": \"\",\n },\n })\n\n assert res.elements is not None\n\n # Handle response\n print(res.elements)" + "source": "from unstructured_client import UnstructuredClient\nfrom unstructured_client.models import shared\n\n\nwith UnstructuredClient() as uc_client:\n\n res = uc_client.general.partition(request={\n \"partition_parameters\": {\n \"files\": {\n \"file_name\": \"example.file\",\n \"content\": open(\"example.file\", \"rb\"),\n },\n \"strategy\": shared.Strategy.AUTO,\n \"vlm_model_provider\": shared.VLMModelProvider.OPENAI,\n \"vlm_model\": \"gpt-4o\",\n 
\"chunking_strategy\": \"by_title\",\n \"split_pdf_page_range\": [\n 1,\n 10,\n ],\n \"split_pdf_cache_tmp_data_dir\": \"\",\n },\n })\n\n assert res.elements is not None\n\n # Handle response\n print(res.elements)" diff --git a/docs/models/operations/createjobrequest.md b/docs/models/operations/createjobrequest.md new file mode 100644 index 00000000..421e615d --- /dev/null +++ b/docs/models/operations/createjobrequest.md @@ -0,0 +1,9 @@ +# CreateJobRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | +| `body_create_job` | [shared.BodyCreateJob](../../models/shared/bodycreatejob.md) | :heavy_check_mark: | N/A | +| `unstructured_api_key` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/operations/createjobresponse.md b/docs/models/operations/createjobresponse.md new file mode 100644 index 00000000..acc88bd2 --- /dev/null +++ b/docs/models/operations/createjobresponse.md @@ -0,0 +1,11 @@ +# CreateJobResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `job_information` | [Optional[shared.JobInformation]](../../models/shared/jobinformation.md) | :heavy_minus_sign: | Successful Response | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [httpx.Response](https://www.python-httpx.org/api/#response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | \ No newline at end of file diff --git a/docs/models/operations/downloadjoboutputrequest.md b/docs/models/operations/downloadjoboutputrequest.md index 26f515a8..3895651a 100644 --- a/docs/models/operations/downloadjoboutputrequest.md +++ b/docs/models/operations/downloadjoboutputrequest.md @@ -3,9 +3,9 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- | -| `file_id` | *str* | :heavy_check_mark: | ID of the file to download | -| `job_id` | *str* | :heavy_check_mark: | N/A | -| `node_id` | *str* | :heavy_check_mark: | Node ID to retrieve the corresponding output file | -| `unstructured_api_key` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | +| `file_id` | *str* | :heavy_check_mark: | ID of the file to download | +| 
`job_id` | *str* | :heavy_check_mark: | N/A | +| `node_id` | *OptionalNullable[str]* | :heavy_minus_sign: | Node ID to retrieve the corresponding output file.If not provided, uses the last node in the workflow. | +| `unstructured_api_key` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/operations/gettemplaterequest.md b/docs/models/operations/gettemplaterequest.md new file mode 100644 index 00000000..f37d549d --- /dev/null +++ b/docs/models/operations/gettemplaterequest.md @@ -0,0 +1,9 @@ +# GetTemplateRequest + + +## Fields + +| Field | Type | Required | Description | +| ----------------------- | ----------------------- | ----------------------- | ----------------------- | +| `template_id` | *str* | :heavy_check_mark: | N/A | +| `unstructured_api_key` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/operations/gettemplateresponse.md b/docs/models/operations/gettemplateresponse.md new file mode 100644 index 00000000..9770ccb6 --- /dev/null +++ b/docs/models/operations/gettemplateresponse.md @@ -0,0 +1,11 @@ +# GetTemplateResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [httpx.Response](https://www.python-httpx.org/api/#response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `template_detail` | [Optional[shared.TemplateDetail]](../../models/shared/templatedetail.md) | :heavy_minus_sign: | Successful Response | \ No newline at end of file diff --git a/docs/models/operations/listtemplatesrequest.md b/docs/models/operations/listtemplatesrequest.md new file mode 100644 index 00000000..b7758f78 --- /dev/null +++ b/docs/models/operations/listtemplatesrequest.md @@ -0,0 +1,8 @@ +# ListTemplatesRequest + + +## Fields + +| Field | Type | Required | Description | +| ----------------------- | ----------------------- | ----------------------- | ----------------------- | +| `unstructured_api_key` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/operations/listtemplatesresponse.md b/docs/models/operations/listtemplatesresponse.md new file mode 100644 index 00000000..212d2a77 --- /dev/null +++ b/docs/models/operations/listtemplatesresponse.md @@ -0,0 +1,11 @@ +# ListTemplatesResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `response_list_templates` | List[[shared.TemplateListItem](../../models/shared/templatelistitem.md)] | :heavy_minus_sign: | Successful Response | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this 
operation | +| `raw_response` | [httpx.Response](https://www.python-httpx.org/api/#response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | \ No newline at end of file diff --git a/docs/models/shared/astradbconnectorconfig.md b/docs/models/shared/astradbconnectorconfig.md index 39b64c01..cf6bad49 100644 --- a/docs/models/shared/astradbconnectorconfig.md +++ b/docs/models/shared/astradbconnectorconfig.md @@ -7,6 +7,7 @@ | ----------------------- | ----------------------- | ----------------------- | ----------------------- | | `api_endpoint` | *str* | :heavy_check_mark: | N/A | | `batch_size` | *int* | :heavy_check_mark: | N/A | +| `binary_encode_vectors` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `collection_name` | *str* | :heavy_check_mark: | N/A | | `keyspace` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `token` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/astradbconnectorconfiginput.md b/docs/models/shared/astradbconnectorconfiginput.md index 402be5fe..5977b9c8 100644 --- a/docs/models/shared/astradbconnectorconfiginput.md +++ b/docs/models/shared/astradbconnectorconfiginput.md @@ -7,6 +7,7 @@ | ----------------------- | ----------------------- | ----------------------- | ----------------------- | | `api_endpoint` | *str* | :heavy_check_mark: | N/A | | `batch_size` | *Optional[int]* | :heavy_minus_sign: | N/A | +| `binary_encode_vectors` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `collection_name` | *str* | :heavy_check_mark: | N/A | | `flatten_metadata` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `keyspace` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | diff --git a/docs/models/shared/bodycreatejob.md b/docs/models/shared/bodycreatejob.md new file mode 100644 index 00000000..1632b184 --- /dev/null +++ b/docs/models/shared/bodycreatejob.md @@ -0,0 +1,9 @@ +# BodyCreateJob + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | +| `input_files` | List[[shared.InputFiles](../../models/shared/inputfiles.md)] | :heavy_minus_sign: | N/A | +| `request_data` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/bodyrunworkflow.md b/docs/models/shared/bodyrunworkflow.md index a74be94e..875b87e6 100644 --- a/docs/models/shared/bodyrunworkflow.md +++ b/docs/models/shared/bodyrunworkflow.md @@ -3,6 +3,6 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| `input_files` | List[[shared.InputFiles](../../models/shared/inputfiles.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | +| `input_files` | 
List[[shared.BodyRunWorkflowInputFiles](../../models/shared/bodyrunworkflowinputfiles.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/bodyrunworkflowinputfiles.md b/docs/models/shared/bodyrunworkflowinputfiles.md new file mode 100644 index 00000000..73943d78 --- /dev/null +++ b/docs/models/shared/bodyrunworkflowinputfiles.md @@ -0,0 +1,10 @@ +# BodyRunWorkflowInputFiles + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | +| `content` | *Union[bytes, IO[bytes], io.BufferedReader]* | :heavy_check_mark: | N/A | +| `content_type` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `file_name` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/config.md b/docs/models/shared/config.md index 03abce7e..b6441eb9 100644 --- a/docs/models/shared/config.md +++ b/docs/models/shared/config.md @@ -3,6 +3,12 @@ ## Supported Types +### `shared.AzureDestinationConnectorConfigInput` + +```python +value: shared.AzureDestinationConnectorConfigInput = /* values here */ +``` + ### `shared.AstraDBConnectorConfigInput` ```python diff --git a/docs/models/shared/createworkflow.md b/docs/models/shared/createworkflow.md index b87f290f..a2f5f669 100644 --- a/docs/models/shared/createworkflow.md +++ b/docs/models/shared/createworkflow.md @@ -10,5 +10,6 @@ | `reprocess_all` | *OptionalNullable[bool]* | :heavy_minus_sign: | N/A | | `schedule` | [OptionalNullable[shared.Schedule]](../../models/shared/schedule.md) | :heavy_minus_sign: | N/A | | `source_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `template_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `workflow_nodes` | List[[shared.WorkflowNode](../../models/shared/workflownode.md)] | :heavy_minus_sign: | N/A | | `workflow_type` | [shared.WorkflowType](../../models/shared/workflowtype.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/destinationconnectorinformationconfig.md b/docs/models/shared/destinationconnectorinformationconfig.md index f7871f5a..70b163e8 100644 --- a/docs/models/shared/destinationconnectorinformationconfig.md +++ b/docs/models/shared/destinationconnectorinformationconfig.md @@ -3,6 +3,12 @@ ## Supported Types +### `shared.AzureDestinationConnectorConfig` + +```python +value: shared.AzureDestinationConnectorConfig = /* values here */ +``` + ### `shared.AstraDBConnectorConfig` ```python diff --git a/docs/models/shared/destinationconnectortype.md b/docs/models/shared/destinationconnectortype.md index d0b8f5ab..02159f7f 100644 --- a/docs/models/shared/destinationconnectortype.md +++ b/docs/models/shared/destinationconnectortype.md @@ -5,6 +5,7 @@ | Name | Value | | -------------------------------- | -------------------------------- | +| `AZURE` | azure | | `ASTRADB` | astradb | | `AZURE_AI_SEARCH` | azure_ai_search | | `COUCHBASE` | couchbase | diff --git a/docs/models/shared/nodefilemetadata.md b/docs/models/shared/nodefilemetadata.md index 3f703c2b..78176d73 100644 --- a/docs/models/shared/nodefilemetadata.md +++ b/docs/models/shared/nodefilemetadata.md @@ -6,4 +6,6 @@ | Field | Type | Required | Description | | ------------------ | ------------------ | ------------------ | ------------------ | | `file_id` | *str* | :heavy_check_mark: | N/A | -| `node_id` | *str* | :heavy_check_mark: | N/A | \ No 
newline at end of file +| `node_id` | *str* | :heavy_check_mark: | N/A | +| `node_subtype` | *str* | :heavy_check_mark: | N/A | +| `node_type` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/partitionparameters.md b/docs/models/shared/partitionparameters.md index 75e52e5e..7202480f 100644 --- a/docs/models/shared/partitionparameters.md +++ b/docs/models/shared/partitionparameters.md @@ -9,6 +9,7 @@ | `combine_under_n_chars` | *OptionalNullable[int]* | :heavy_minus_sign: | If chunking strategy is set, combine elements until a section reaches a length of n chars. Default: 500 | | | `content_type` | *OptionalNullable[str]* | :heavy_minus_sign: | A hint about the content type to use (such as text/markdown), when there are problems processing a specific file. This value is a MIME type in the format type/subtype. | | | `coordinates` | *Optional[bool]* | :heavy_minus_sign: | If `True`, return coordinates for each element extracted via OCR. Default: `False` | | +| `do_not_break_similarity_on_footer_header` | *Optional[bool]* | :heavy_minus_sign: | When `True`, footer, header, and page number are always considered similar to the text before them for chunk by similarity method. This allows chunk by similarity to connect contents across page better. | | | `encoding` | *OptionalNullable[str]* | :heavy_minus_sign: | The encoding method used to decode the text input. Default: utf-8 | | | `extract_image_block_types` | List[*str*] | :heavy_minus_sign: | The types of elements to extract, for use in extracting image blocks as base64 encoded data stored in metadata fields. | | | `files` | [shared.Files](../../models/shared/files.md) | :heavy_check_mark: | The file to extract | {
"summary": "File to be partitioned",
"externalValue": "https://github.com/Unstructured-IO/unstructured/blob/98d3541909f64290b5efb65a226fc3ee8a7cc5ee/example-docs/layout-parser-paper.pdf"
} | @@ -42,6 +43,6 @@ | `strategy` | [Optional[shared.Strategy]](../../models/shared/strategy.md) | :heavy_minus_sign: | The strategy to use for partitioning PDF/image. Options are fast, hi_res, auto. Default: hi_res | auto | | `table_ocr_agent` | *OptionalNullable[str]* | :heavy_minus_sign: | The OCR agent to use for table ocr inference. | | | `unique_element_ids` | *Optional[bool]* | :heavy_minus_sign: | When `True`, assign UUIDs to element IDs, which guarantees their uniqueness (useful when using them as primary keys in database). Otherwise a SHA-256 of element text is used. Default: `False` | | -| `vlm_model` | [Optional[shared.VLMModel]](../../models/shared/vlmmodel.md) | :heavy_minus_sign: | The VLM Model to use. | gpt-4o | +| `vlm_model` | *Optional[str]* | :heavy_minus_sign: | The VLM Model to use. | gpt-4o | | `vlm_model_provider` | [Optional[shared.VLMModelProvider]](../../models/shared/vlmmodelprovider.md) | :heavy_minus_sign: | The VLM Model provider to use. | openai | | `xml_keep_tags` | *Optional[bool]* | :heavy_minus_sign: | If `True`, will retain the XML tags in the output. Otherwise it will simply extract the text from within the tags. Only applies to XML documents. | | \ No newline at end of file diff --git a/docs/models/shared/snowflakedestinationconnectorconfig.md b/docs/models/shared/snowflakedestinationconnectorconfig.md index c2a06ae2..775df4cd 100644 --- a/docs/models/shared/snowflakedestinationconnectorconfig.md +++ b/docs/models/shared/snowflakedestinationconnectorconfig.md @@ -13,6 +13,6 @@ | `port` | *Optional[int]* | :heavy_minus_sign: | N/A | | `record_id_key` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `role` | *str* | :heavy_check_mark: | N/A | -| `schema_` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `schema_` | *str* | :heavy_check_mark: | N/A | | `table_name` | *Optional[str]* | :heavy_minus_sign: | N/A | | `user` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/snowflakedestinationconnectorconfiginput.md b/docs/models/shared/snowflakedestinationconnectorconfiginput.md index 1948fc3d..bc116e55 100644 --- a/docs/models/shared/snowflakedestinationconnectorconfiginput.md +++ b/docs/models/shared/snowflakedestinationconnectorconfiginput.md @@ -13,6 +13,6 @@ | `port` | *Optional[int]* | :heavy_minus_sign: | N/A | | `record_id_key` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `role` | *str* | :heavy_check_mark: | N/A | -| `schema_` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `schema_` | *str* | :heavy_check_mark: | N/A | | `table_name` | *Optional[str]* | :heavy_minus_sign: | N/A | | `user` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/snowflakesourceconnectorconfig.md b/docs/models/shared/snowflakesourceconnectorconfig.md index 0b996f6c..46642d67 100644 --- a/docs/models/shared/snowflakesourceconnectorconfig.md +++ b/docs/models/shared/snowflakesourceconnectorconfig.md @@ -14,6 +14,6 @@ | `password` | *str* | :heavy_check_mark: | N/A | | `port` | *Optional[int]* | :heavy_minus_sign: | N/A | | `role` | *str* | :heavy_check_mark: | N/A | -| `schema_` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `schema_` | *str* | :heavy_check_mark: | N/A | | `table_name` | *str* | :heavy_check_mark: | N/A | | `user` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/snowflakesourceconnectorconfiginput.md b/docs/models/shared/snowflakesourceconnectorconfiginput.md index 4c65a507..adae4fec 100644 
--- a/docs/models/shared/snowflakesourceconnectorconfiginput.md +++ b/docs/models/shared/snowflakesourceconnectorconfiginput.md @@ -14,6 +14,6 @@ | `password` | *str* | :heavy_check_mark: | N/A | | `port` | *Optional[int]* | :heavy_minus_sign: | N/A | | `role` | *str* | :heavy_check_mark: | N/A | -| `schema_` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `schema_` | *str* | :heavy_check_mark: | N/A | | `table_name` | *str* | :heavy_check_mark: | N/A | | `user` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/templatedetail.md b/docs/models/shared/templatedetail.md new file mode 100644 index 00000000..8a205f80 --- /dev/null +++ b/docs/models/shared/templatedetail.md @@ -0,0 +1,15 @@ +# TemplateDetail + +Full template details including nodes. + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | +| `description` | *str* | :heavy_check_mark: | N/A | +| `id` | *str* | :heavy_check_mark: | N/A | +| `last_updated` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `nodes` | List[[shared.TemplateNode](../../models/shared/templatenode.md)] | :heavy_check_mark: | N/A | +| `version` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/templatelistitem.md b/docs/models/shared/templatelistitem.md new file mode 100644 index 00000000..2034a120 --- /dev/null +++ b/docs/models/shared/templatelistitem.md @@ -0,0 +1,14 @@ +# TemplateListItem + +Template metadata for list responses. + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `description` | *str* | :heavy_check_mark: | N/A | +| `id` | *str* | :heavy_check_mark: | N/A | +| `last_updated` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `version` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/templatenode.md b/docs/models/shared/templatenode.md new file mode 100644 index 00000000..4e649c92 --- /dev/null +++ b/docs/models/shared/templatenode.md @@ -0,0 +1,14 @@ +# TemplateNode + +A node in a template DAG. 
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `id` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `settings` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `subtype` | *str* | :heavy_check_mark: | N/A | +| `type` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/updatedestinationconnectorconfig.md b/docs/models/shared/updatedestinationconnectorconfig.md index a0424473..ea3688cb 100644 --- a/docs/models/shared/updatedestinationconnectorconfig.md +++ b/docs/models/shared/updatedestinationconnectorconfig.md @@ -3,6 +3,12 @@ ## Supported Types +### `shared.AzureDestinationConnectorConfigInput` + +```python +value: shared.AzureDestinationConnectorConfigInput = /* values here */ +``` + ### `shared.AstraDBConnectorConfigInput` ```python diff --git a/docs/models/shared/updateworkflow.md b/docs/models/shared/updateworkflow.md index 370b773c..5f3b2014 100644 --- a/docs/models/shared/updateworkflow.md +++ b/docs/models/shared/updateworkflow.md @@ -10,5 +10,6 @@ | `reprocess_all` | *OptionalNullable[bool]* | :heavy_minus_sign: | N/A | | `schedule` | [OptionalNullable[shared.UpdateWorkflowSchedule]](../../models/shared/updateworkflowschedule.md) | :heavy_minus_sign: | N/A | | `source_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | +| `template_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `workflow_nodes` | List[[shared.WorkflowNode](../../models/shared/workflownode.md)] | :heavy_minus_sign: | N/A | | `workflow_type` | [OptionalNullable[shared.WorkflowType]](../../models/shared/workflowtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/vlmmodel.md b/docs/models/shared/vlmmodel.md deleted file mode 100644 index 0a2e7ab4..00000000 --- a/docs/models/shared/vlmmodel.md +++ /dev/null @@ -1,23 +0,0 @@ -# VLMModel - -The VLM Model to use. 
- - -## Values - -| Name | Value | -| ---------------------------------------------- | ---------------------------------------------- | -| `CLAUDE_3_5_SONNET_20241022` | claude-3-5-sonnet-20241022 | -| `CLAUDE_3_7_SONNET_20250219` | claude-3-7-sonnet-20250219 | -| `GPT_4O` | gpt-4o | -| `GEMINI_1_5_PRO` | gemini-1.5-pro | -| `US_AMAZON_NOVA_PRO_V1_0` | us.amazon.nova-pro-v1:0 | -| `US_AMAZON_NOVA_LITE_V1_0` | us.amazon.nova-lite-v1:0 | -| `US_ANTHROPIC_CLAUDE_3_7_SONNET_20250219_V1_0` | us.anthropic.claude-3-7-sonnet-20250219-v1:0 | -| `US_ANTHROPIC_CLAUDE_3_5_SONNET_20241022_V2_0` | us.anthropic.claude-3-5-sonnet-20241022-v2:0 | -| `US_ANTHROPIC_CLAUDE_3_OPUS_20240229_V1_0` | us.anthropic.claude-3-opus-20240229-v1:0 | -| `US_ANTHROPIC_CLAUDE_3_HAIKU_20240307_V1_0` | us.anthropic.claude-3-haiku-20240307-v1:0 | -| `US_ANTHROPIC_CLAUDE_3_SONNET_20240229_V1_0` | us.anthropic.claude-3-sonnet-20240229-v1:0 | -| `US_META_LLAMA3_2_90B_INSTRUCT_V1_0` | us.meta.llama3-2-90b-instruct-v1:0 | -| `US_META_LLAMA3_2_11B_INSTRUCT_V1_0` | us.meta.llama3-2-11b-instruct-v1:0 | -| `GEMINI_2_0_FLASH_001` | gemini-2.0-flash-001 | \ No newline at end of file diff --git a/docs/models/shared/workflowinformation.md b/docs/models/shared/workflowinformation.md index d9fe6c77..f8c4b867 100644 --- a/docs/models/shared/workflowinformation.md +++ b/docs/models/shared/workflowinformation.md @@ -9,7 +9,7 @@ | `destinations` | List[*str*] | :heavy_check_mark: | N/A | | | `id` | *str* | :heavy_check_mark: | N/A | | | `name` | *str* | :heavy_check_mark: | N/A | | -| `reprocess_all` | *OptionalNullable[bool]* | :heavy_minus_sign: | N/A | | +| `reprocess_all` | *Optional[bool]* | :heavy_minus_sign: | N/A | | | `schedule` | [OptionalNullable[shared.WorkflowSchedule]](../../models/shared/workflowschedule.md) | :heavy_minus_sign: | N/A | {
"crontab_entries": [
{
"cron_expression": "0 0 * * *"
}
]
} | | `sources` | List[*str*] | :heavy_check_mark: | N/A | | | `status` | [shared.WorkflowState](../../models/shared/workflowstate.md) | :heavy_check_mark: | N/A | | diff --git a/docs/models/shared/workflowjobtype.md b/docs/models/shared/workflowjobtype.md index 5ad02fc0..b106b4f6 100644 --- a/docs/models/shared/workflowjobtype.md +++ b/docs/models/shared/workflowjobtype.md @@ -7,4 +7,5 @@ | ------------ | ------------ | | `EPHEMERAL` | ephemeral | | `PERSISTENT` | persistent | -| `SCHEDULED` | scheduled | \ No newline at end of file +| `SCHEDULED` | scheduled | +| `TEMPLATE` | template | \ No newline at end of file diff --git a/docs/sdks/general/README.md b/docs/sdks/general/README.md index 1eee2b9e..4d900e4e 100644 --- a/docs/sdks/general/README.md +++ b/docs/sdks/general/README.md @@ -34,7 +34,7 @@ with UnstructuredClient() as uc_client: 10, ], "strategy": shared.Strategy.AUTO, - "vlm_model": shared.VLMModel.GPT_4O, + "vlm_model": "gpt-4o", "vlm_model_provider": shared.VLMModelProvider.OPENAI, }, }) diff --git a/docs/sdks/jobs/README.md b/docs/sdks/jobs/README.md index feddfe3b..eabb0931 100644 --- a/docs/sdks/jobs/README.md +++ b/docs/sdks/jobs/README.md @@ -6,6 +6,7 @@ ### Available Operations * [cancel_job](#cancel_job) - Cancel Job +* [create_job](#create_job) - Create Job * [download_job_output](#download_job_output) - Download Job output * [get_job](#get_job) - Get Job * [get_job_details](#get_job_details) - Get Job processing details @@ -54,6 +55,50 @@ with UnstructuredClient() as uc_client: | errors.HTTPValidationError | 422 | application/json | | errors.SDKError | 4XX, 5XX | \*/\* | +## create_job + +Create a new on-demand job using either a template (with persistent job optimization) or custom DAG. + +### Example Usage + + +```python +from unstructured_client import UnstructuredClient + + +with UnstructuredClient() as uc_client: + + res = uc_client.jobs.create_job(request={ + "body_create_job": { + "request_data": "", + }, + }) + + assert res.job_information is not None + + # Handle response + print(res.job_information) + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `request` | [operations.CreateJobRequest](../../models/operations/createjobrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | + +### Response + +**[operations.CreateJobResponse](../../models/operations/createjobresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| -------------------------- | -------------------------- | -------------------------- | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | + ## download_job_output Download the output of a job from a workflow where the input file was provided at runtime. 
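The `download_job_output` section this hunk ends on pairs with the earlier `DownloadJobOutputRequest` change, where `node_id` became `OptionalNullable` and, per its new description, falls back to the last node in the workflow when omitted. A minimal, hedged sketch of that call with placeholder IDs, mirroring the regenerated sample in `codeSamples.yaml`:

```python
from unstructured_client import UnstructuredClient

with UnstructuredClient() as uc_client:
    res = uc_client.jobs.download_job_output(request={
        "job_id": "YOUR_JOB_ID",    # placeholder
        "file_id": "YOUR_FILE_ID",  # placeholder
        # node_id omitted: per the updated field description, the output of the
        # last node in the workflow is returned.
    })

    assert res.any is not None

    # Handle response
    print(res.any)
```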
diff --git a/docs/sdks/templates/README.md b/docs/sdks/templates/README.md new file mode 100644 index 00000000..7354619f --- /dev/null +++ b/docs/sdks/templates/README.md @@ -0,0 +1,91 @@ +# Templates +(*templates*) + +## Overview + +### Available Operations + +* [get_template](#get_template) - Get Template +* [list_templates](#list_templates) - List Templates + +## get_template + +Retrieve detailed information and DAG for a specific template. + +### Example Usage + + +```python +from unstructured_client import UnstructuredClient + + +with UnstructuredClient() as uc_client: + + res = uc_client.templates.get_template(request={ + "template_id": "", + }) + + assert res.template_detail is not None + + # Handle response + print(res.template_detail) + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | +| `request` | [operations.GetTemplateRequest](../../models/operations/gettemplaterequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | + +### Response + +**[operations.GetTemplateResponse](../../models/operations/gettemplateresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| -------------------------- | -------------------------- | -------------------------- | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## list_templates + +Retrieve a list of available templates with their metadata. + +### Example Usage + + +```python +from unstructured_client import UnstructuredClient + + +with UnstructuredClient() as uc_client: + + res = uc_client.templates.list_templates(request={}) + + assert res.response_list_templates is not None + + # Handle response + print(res.response_list_templates) + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | +| `request` | [operations.ListTemplatesRequest](../../models/operations/listtemplatesrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. 
| + +### Response + +**[operations.ListTemplatesResponse](../../models/operations/listtemplatesresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| -------------------------- | -------------------------- | -------------------------- | +| errors.HTTPValidationError | 422 | application/json | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/gen.yaml b/gen.yaml index 7cfcae60..ec6cbe49 100644 --- a/gen.yaml +++ b/gen.yaml @@ -17,6 +17,10 @@ generation: generateTests: true generateNewTests: false skipResponseBodyAssertions: false + persistentEdits: {} + requestBodyFieldName: "" + schemas: + allOfMergeStrategy: shallowMerge python: version: 0.42.4 additionalDependencies: @@ -36,10 +40,15 @@ python: httpx: '>=0.27.0' pypdf: '>= 6.2.0' requests-toolbelt: '>=1.0.0' + allowedRedefinedBuiltins: + - id + - object + asyncMode: both authors: - Unstructured baseErrorName: UnstructuredClientError clientServerStatusCodesAsErrors: true + constFieldCasing: upper defaultErrorName: SDKError description: Python Client SDK for Unstructured API enableCustomCodeRegions: true @@ -57,16 +66,20 @@ python: operations: models/operations shared: models/shared webhooks: models/webhooks + inferUnionDiscriminators: true inputModelSuffix: input license: MIT maxMethodParams: 0 methodArguments: require-security-and-request moduleName: "" + multipartArrayFormat: legacy outputModelSuffix: output packageManager: poetry packageName: unstructured-client + preApplyUnionDiscriminators: false projectUrls: {} pytestFilterWarnings: [] pytestTimeout: 0 responseFormat: envelope + sseFlatResponse: false templateVersion: v2 diff --git a/poetry.lock b/poetry.lock index ad4817bf..6e93a276 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. 
[[package]] name = "aiofiles" diff --git a/pyproject.toml b/pyproject.toml index dde0a3dd..92e7a302 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,4 @@ + [project] name = "unstructured-client" version = "0.42.4" diff --git a/src/unstructured_client/_version.py b/src/unstructured_client/_version.py index f319ac07..da027fa4 100644 --- a/src/unstructured_client/_version.py +++ b/src/unstructured_client/_version.py @@ -4,9 +4,9 @@ __title__: str = "unstructured-client" __version__: str = "0.42.4" -__openapi_doc_version__: str = "1.2.10" -__gen_version__: str = "2.674.3" -__user_agent__: str = "speakeasy-sdk/python 0.42.4 2.674.3 1.2.10 unstructured-client" +__openapi_doc_version__: str = "1.2.21" +__gen_version__: str = "2.680.0" +__user_agent__: str = "speakeasy-sdk/python 0.42.4 2.680.0 1.2.21 unstructured-client" try: if __package__ is not None: diff --git a/src/unstructured_client/jobs.py b/src/unstructured_client/jobs.py index ba620032..6affab2e 100644 --- a/src/unstructured_client/jobs.py +++ b/src/unstructured_client/jobs.py @@ -206,6 +206,212 @@ async def cancel_job_async( raise errors.SDKError("Unexpected response received", http_res) + def create_job( + self, + *, + request: Union[ + operations.CreateJobRequest, operations.CreateJobRequestTypedDict + ], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> operations.CreateJobResponse: + r"""Create Job + + Create a new on-demand job using either a template (with persistent job optimization) or custom DAG. + + :param request: The request object to send. + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.CreateJobRequest) + request = cast(operations.CreateJobRequest, request) + + req = self._build_request( + method="POST", + path="/api/v1/jobs/", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body( + request.body_create_job, False, False, "multipart", shared.BodyCreateJob + ), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(3000, 720000, 1.88, 1800000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5xx"]) + + http_res = self.do_request( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + operation_id="create_job", + oauth2_scopes=[], + security_source=self.sdk_configuration.security, + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return operations.CreateJobResponse( + job_information=unmarshal_json_response( + Optional[shared.JobInformation], http_res + ), + status_code=http_res.status_code, + content_type=http_res.headers.get("Content-Type") or "", + raw_response=http_res, + ) + if utils.match_response(http_res, "422", "application/json"): + response_data = unmarshal_json_response( + errors.HTTPValidationErrorData, http_res + ) + raise errors.HTTPValidationError(response_data, http_res) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + + raise errors.SDKError("Unexpected response received", http_res) + + async def create_job_async( + self, + *, + request: Union[ + operations.CreateJobRequest, operations.CreateJobRequestTypedDict + ], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> operations.CreateJobResponse: + r"""Create Job + + Create a new on-demand job using either a template (with persistent job optimization) or custom DAG. + + :param request: The request object to send. + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.CreateJobRequest) + request = cast(operations.CreateJobRequest, request) + + req = self._build_request_async( + method="POST", + path="/api/v1/jobs/", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body( + request.body_create_job, False, False, "multipart", shared.BodyCreateJob + ), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(3000, 720000, 1.88, 1800000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5xx"]) + + http_res = await self.do_request_async( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + operation_id="create_job", + oauth2_scopes=[], + security_source=self.sdk_configuration.security, + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return operations.CreateJobResponse( + job_information=unmarshal_json_response( + Optional[shared.JobInformation], http_res + ), + status_code=http_res.status_code, + content_type=http_res.headers.get("Content-Type") or "", + raw_response=http_res, + ) + if utils.match_response(http_res, "422", "application/json"): + response_data = unmarshal_json_response( + errors.HTTPValidationErrorData, http_res + ) + raise errors.HTTPValidationError(response_data, http_res) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + + raise errors.SDKError("Unexpected response received", http_res) + def download_job_output( self, *, diff --git a/src/unstructured_client/models/operations/__init__.py b/src/unstructured_client/models/operations/__init__.py index 8c6fbfc2..e5a7db5d 100644 --- a/src/unstructured_client/models/operations/__init__.py +++ b/src/unstructured_client/models/operations/__init__.py @@ -29,6 +29,12 @@ CreateDestinationResponse, CreateDestinationResponseTypedDict, ) + from .create_job import ( + CreateJobRequest, + CreateJobRequestTypedDict, + CreateJobResponse, + CreateJobResponseTypedDict, + ) from .create_source import ( CreateSourceRequest, CreateSourceRequestTypedDict, @@ -107,6 +113,12 @@ GetSourceResponse, GetSourceResponseTypedDict, ) + from .get_template import ( + GetTemplateRequest, + GetTemplateRequestTypedDict, + GetTemplateResponse, + GetTemplateResponseTypedDict, + ) from .get_workflow import ( GetWorkflowRequest, GetWorkflowRequestTypedDict, @@ -131,6 +143,12 @@ 
ListSourcesResponse, ListSourcesResponseTypedDict, ) + from .list_templates import ( + ListTemplatesRequest, + ListTemplatesRequestTypedDict, + ListTemplatesResponse, + ListTemplatesResponseTypedDict, + ) from .list_workflows import ( ListWorkflowsRequest, ListWorkflowsRequestTypedDict, @@ -185,6 +203,10 @@ "CreateDestinationRequestTypedDict", "CreateDestinationResponse", "CreateDestinationResponseTypedDict", + "CreateJobRequest", + "CreateJobRequestTypedDict", + "CreateJobResponse", + "CreateJobResponseTypedDict", "CreateSourceRequest", "CreateSourceRequestTypedDict", "CreateSourceResponse", @@ -237,6 +259,10 @@ "GetSourceRequestTypedDict", "GetSourceResponse", "GetSourceResponseTypedDict", + "GetTemplateRequest", + "GetTemplateRequestTypedDict", + "GetTemplateResponse", + "GetTemplateResponseTypedDict", "GetWorkflowRequest", "GetWorkflowRequestTypedDict", "GetWorkflowResponse", @@ -253,6 +279,10 @@ "ListSourcesRequestTypedDict", "ListSourcesResponse", "ListSourcesResponseTypedDict", + "ListTemplatesRequest", + "ListTemplatesRequestTypedDict", + "ListTemplatesResponse", + "ListTemplatesResponseTypedDict", "ListWorkflowsRequest", "ListWorkflowsRequestTypedDict", "ListWorkflowsResponse", @@ -296,6 +326,10 @@ "CreateDestinationRequestTypedDict": ".create_destination", "CreateDestinationResponse": ".create_destination", "CreateDestinationResponseTypedDict": ".create_destination", + "CreateJobRequest": ".create_job", + "CreateJobRequestTypedDict": ".create_job", + "CreateJobResponse": ".create_job", + "CreateJobResponseTypedDict": ".create_job", "CreateSourceRequest": ".create_source", "CreateSourceRequestTypedDict": ".create_source", "CreateSourceResponse": ".create_source", @@ -348,6 +382,10 @@ "GetSourceRequestTypedDict": ".get_source", "GetSourceResponse": ".get_source", "GetSourceResponseTypedDict": ".get_source", + "GetTemplateRequest": ".get_template", + "GetTemplateRequestTypedDict": ".get_template", + "GetTemplateResponse": ".get_template", + "GetTemplateResponseTypedDict": ".get_template", "GetWorkflowRequest": ".get_workflow", "GetWorkflowRequestTypedDict": ".get_workflow", "GetWorkflowResponse": ".get_workflow", @@ -364,6 +402,10 @@ "ListSourcesRequestTypedDict": ".list_sources", "ListSourcesResponse": ".list_sources", "ListSourcesResponseTypedDict": ".list_sources", + "ListTemplatesRequest": ".list_templates", + "ListTemplatesRequestTypedDict": ".list_templates", + "ListTemplatesResponse": ".list_templates", + "ListTemplatesResponseTypedDict": ".list_templates", "ListWorkflowsRequest": ".list_workflows", "ListWorkflowsRequestTypedDict": ".list_workflows", "ListWorkflowsResponse": ".list_workflows", diff --git a/src/unstructured_client/models/operations/create_job.py b/src/unstructured_client/models/operations/create_job.py new file mode 100644 index 00000000..906275c6 --- /dev/null +++ b/src/unstructured_client/models/operations/create_job.py @@ -0,0 +1,93 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import httpx +import pydantic +from pydantic import model_serializer +from typing import Optional +from typing_extensions import Annotated, NotRequired, TypedDict +from unstructured_client.models.shared import ( + body_create_job as shared_body_create_job, + jobinformation as shared_jobinformation, +) +from unstructured_client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from unstructured_client.utils import FieldMetadata, HeaderMetadata, RequestMetadata + + +class CreateJobRequestTypedDict(TypedDict): + body_create_job: shared_body_create_job.BodyCreateJobTypedDict + unstructured_api_key: NotRequired[Nullable[str]] + + +class CreateJobRequest(BaseModel): + body_create_job: Annotated[ + shared_body_create_job.BodyCreateJob, + FieldMetadata(request=RequestMetadata(media_type="multipart/form-data")), + ] + + unstructured_api_key: Annotated[ + OptionalNullable[str], + pydantic.Field(alias="unstructured-api-key"), + FieldMetadata(header=HeaderMetadata(style="simple", explode=False)), + ] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["unstructured-api-key"] + nullable_fields = ["unstructured-api-key"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + return m + + +class CreateJobResponseTypedDict(TypedDict): + content_type: str + r"""HTTP response content type for this operation""" + status_code: int + r"""HTTP response status code for this operation""" + raw_response: httpx.Response + r"""Raw HTTP response; suitable for custom response parsing""" + job_information: NotRequired[shared_jobinformation.JobInformationTypedDict] + r"""Successful Response""" + + +class CreateJobResponse(BaseModel): + content_type: str + r"""HTTP response content type for this operation""" + + status_code: int + r"""HTTP response status code for this operation""" + + raw_response: httpx.Response + r"""Raw HTTP response; suitable for custom response parsing""" + + job_information: Optional[shared_jobinformation.JobInformation] = None + r"""Successful Response""" diff --git a/src/unstructured_client/models/operations/download_job_output.py b/src/unstructured_client/models/operations/download_job_output.py index c11e2837..aaec9012 100644 --- a/src/unstructured_client/models/operations/download_job_output.py +++ b/src/unstructured_client/models/operations/download_job_output.py @@ -25,8 +25,8 @@ class DownloadJobOutputRequestTypedDict(TypedDict): file_id: str r"""ID of the file to download""" job_id: str - node_id: str - r"""Node ID to retrieve the corresponding output file""" + node_id: NotRequired[Nullable[str]] + r"""Node ID to retrieve the corresponding output file.If not provided, uses the last node in the workflow.""" unstructured_api_key: NotRequired[Nullable[str]] @@ -41,9 +41,10 @@ class DownloadJobOutputRequest(BaseModel): ] node_id: Annotated[ - str, FieldMetadata(query=QueryParamMetadata(style="form", explode=True)) - ] - r"""Node ID to retrieve the 
corresponding output file""" + OptionalNullable[str], + FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), + ] = UNSET + r"""Node ID to retrieve the corresponding output file.If not provided, uses the last node in the workflow.""" unstructured_api_key: Annotated[ OptionalNullable[str], @@ -53,8 +54,8 @@ class DownloadJobOutputRequest(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["unstructured-api-key"] - nullable_fields = ["unstructured-api-key"] + optional_fields = ["node_id", "unstructured-api-key"] + nullable_fields = ["node_id", "unstructured-api-key"] null_default_fields = [] serialized = handler(self) diff --git a/src/unstructured_client/models/operations/get_template.py b/src/unstructured_client/models/operations/get_template.py new file mode 100644 index 00000000..33c5d6f5 --- /dev/null +++ b/src/unstructured_client/models/operations/get_template.py @@ -0,0 +1,89 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import httpx +import pydantic +from pydantic import model_serializer +from typing import Optional +from typing_extensions import Annotated, NotRequired, TypedDict +from unstructured_client.models.shared import templatedetail as shared_templatedetail +from unstructured_client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from unstructured_client.utils import FieldMetadata, HeaderMetadata, PathParamMetadata + + +class GetTemplateRequestTypedDict(TypedDict): + template_id: str + unstructured_api_key: NotRequired[Nullable[str]] + + +class GetTemplateRequest(BaseModel): + template_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + + unstructured_api_key: Annotated[ + OptionalNullable[str], + pydantic.Field(alias="unstructured-api-key"), + FieldMetadata(header=HeaderMetadata(style="simple", explode=False)), + ] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["unstructured-api-key"] + nullable_fields = ["unstructured-api-key"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + return m + + +class GetTemplateResponseTypedDict(TypedDict): + content_type: str + r"""HTTP response content type for this operation""" + status_code: int + r"""HTTP response status code for this operation""" + raw_response: httpx.Response + r"""Raw HTTP response; suitable for custom response parsing""" + template_detail: NotRequired[shared_templatedetail.TemplateDetailTypedDict] + r"""Successful Response""" + + +class GetTemplateResponse(BaseModel): + content_type: str + r"""HTTP response content type for this operation""" + + status_code: int + r"""HTTP response status code for this operation""" + + raw_response: httpx.Response + r"""Raw HTTP response; suitable for custom response parsing""" + + template_detail: Optional[shared_templatedetail.TemplateDetail] = None + r"""Successful Response""" diff --git 
a/src/unstructured_client/models/operations/list_templates.py b/src/unstructured_client/models/operations/list_templates.py new file mode 100644 index 00000000..d07ed096 --- /dev/null +++ b/src/unstructured_client/models/operations/list_templates.py @@ -0,0 +1,90 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import httpx +import pydantic +from pydantic import model_serializer +from typing import List, Optional +from typing_extensions import Annotated, NotRequired, TypedDict +from unstructured_client.models.shared import ( + templatelistitem as shared_templatelistitem, +) +from unstructured_client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from unstructured_client.utils import FieldMetadata, HeaderMetadata + + +class ListTemplatesRequestTypedDict(TypedDict): + unstructured_api_key: NotRequired[Nullable[str]] + + +class ListTemplatesRequest(BaseModel): + unstructured_api_key: Annotated[ + OptionalNullable[str], + pydantic.Field(alias="unstructured-api-key"), + FieldMetadata(header=HeaderMetadata(style="simple", explode=False)), + ] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["unstructured-api-key"] + nullable_fields = ["unstructured-api-key"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + return m + + +class ListTemplatesResponseTypedDict(TypedDict): + content_type: str + r"""HTTP response content type for this operation""" + status_code: int + r"""HTTP response status code for this operation""" + raw_response: httpx.Response + r"""Raw HTTP response; suitable for custom response parsing""" + response_list_templates: NotRequired[ + List[shared_templatelistitem.TemplateListItemTypedDict] + ] + r"""Successful Response""" + + +class ListTemplatesResponse(BaseModel): + content_type: str + r"""HTTP response content type for this operation""" + + status_code: int + r"""HTTP response status code for this operation""" + + raw_response: httpx.Response + r"""Raw HTTP response; suitable for custom response parsing""" + + response_list_templates: Optional[ + List[shared_templatelistitem.TemplateListItem] + ] = None + r"""Successful Response""" diff --git a/src/unstructured_client/models/shared/__init__.py b/src/unstructured_client/models/shared/__init__.py index 0ab9d4af..6fad0b49 100644 --- a/src/unstructured_client/models/shared/__init__.py +++ b/src/unstructured_client/models/shared/__init__.py @@ -21,6 +21,14 @@ AzureAISearchConnectorConfigInput, AzureAISearchConnectorConfigInputTypedDict, ) + from .azuredestinationconnectorconfig import ( + AzureDestinationConnectorConfig, + AzureDestinationConnectorConfigTypedDict, + ) + from .azuredestinationconnectorconfiginput import ( + AzureDestinationConnectorConfigInput, + AzureDestinationConnectorConfigInputTypedDict, + ) from .azuresourceconnectorconfig import ( AzureSourceConnectorConfig, AzureSourceConnectorConfigTypedDict, @@ -29,11 +37,17 @@ AzureSourceConnectorConfigInput, 
AzureSourceConnectorConfigInputTypedDict, ) + from .body_create_job import ( + BodyCreateJob, + BodyCreateJobTypedDict, + InputFiles, + InputFilesTypedDict, + ) from .body_run_workflow import ( BodyRunWorkflow, + BodyRunWorkflowInputFiles, + BodyRunWorkflowInputFilesTypedDict, BodyRunWorkflowTypedDict, - InputFiles, - InputFilesTypedDict, ) from .boxsourceconnectorconfig import ( BoxSourceConnectorConfig, @@ -257,7 +271,6 @@ PartitionParameters, PartitionParametersTypedDict, Strategy, - VLMModel, VLMModelProvider, ) from .pineconedestinationconnectorconfig import ( @@ -358,6 +371,9 @@ SourceConnectorInformationTypedDict, ) from .sourceconnectortype import SourceConnectorType + from .templatedetail import TemplateDetail, TemplateDetailTypedDict + from .templatelistitem import TemplateListItem, TemplateListItemTypedDict + from .templatenode import TemplateNode, TemplateNodeTypedDict from .updatedestinationconnector import ( UpdateDestinationConnector, UpdateDestinationConnectorConfig, @@ -413,11 +429,19 @@ "AzureAISearchConnectorConfigInput", "AzureAISearchConnectorConfigInputTypedDict", "AzureAISearchConnectorConfigTypedDict", + "AzureDestinationConnectorConfig", + "AzureDestinationConnectorConfigInput", + "AzureDestinationConnectorConfigInputTypedDict", + "AzureDestinationConnectorConfigTypedDict", "AzureSourceConnectorConfig", "AzureSourceConnectorConfigInput", "AzureSourceConnectorConfigInputTypedDict", "AzureSourceConnectorConfigTypedDict", + "BodyCreateJob", + "BodyCreateJobTypedDict", "BodyRunWorkflow", + "BodyRunWorkflowInputFiles", + "BodyRunWorkflowInputFilesTypedDict", "BodyRunWorkflowTypedDict", "BoxSourceConnectorConfig", "BoxSourceConnectorConfigInput", @@ -611,6 +635,12 @@ "SourceConnectorInformationTypedDict", "SourceConnectorType", "Strategy", + "TemplateDetail", + "TemplateDetailTypedDict", + "TemplateListItem", + "TemplateListItemTypedDict", + "TemplateNode", + "TemplateNodeTypedDict", "UpdateDestinationConnector", "UpdateDestinationConnectorConfig", "UpdateDestinationConnectorConfigTypedDict", @@ -622,7 +652,6 @@ "UpdateWorkflow", "UpdateWorkflowSchedule", "UpdateWorkflowTypedDict", - "VLMModel", "VLMModelProvider", "ValidationError", "ValidationErrorTypedDict", @@ -654,14 +683,22 @@ "AzureAISearchConnectorConfigTypedDict": ".azureaisearchconnectorconfig", "AzureAISearchConnectorConfigInput": ".azureaisearchconnectorconfiginput", "AzureAISearchConnectorConfigInputTypedDict": ".azureaisearchconnectorconfiginput", + "AzureDestinationConnectorConfig": ".azuredestinationconnectorconfig", + "AzureDestinationConnectorConfigTypedDict": ".azuredestinationconnectorconfig", + "AzureDestinationConnectorConfigInput": ".azuredestinationconnectorconfiginput", + "AzureDestinationConnectorConfigInputTypedDict": ".azuredestinationconnectorconfiginput", "AzureSourceConnectorConfig": ".azuresourceconnectorconfig", "AzureSourceConnectorConfigTypedDict": ".azuresourceconnectorconfig", "AzureSourceConnectorConfigInput": ".azuresourceconnectorconfiginput", "AzureSourceConnectorConfigInputTypedDict": ".azuresourceconnectorconfiginput", + "BodyCreateJob": ".body_create_job", + "BodyCreateJobTypedDict": ".body_create_job", + "InputFiles": ".body_create_job", + "InputFilesTypedDict": ".body_create_job", "BodyRunWorkflow": ".body_run_workflow", + "BodyRunWorkflowInputFiles": ".body_run_workflow", + "BodyRunWorkflowInputFilesTypedDict": ".body_run_workflow", "BodyRunWorkflowTypedDict": ".body_run_workflow", - "InputFiles": ".body_run_workflow", - "InputFilesTypedDict": ".body_run_workflow", 
"BoxSourceConnectorConfig": ".boxsourceconnectorconfig", "BoxSourceConnectorConfigTypedDict": ".boxsourceconnectorconfig", "BoxSourceConnectorConfigInput": ".boxsourceconnectorconfiginput", @@ -796,7 +833,6 @@ "PartitionParameters": ".partition_parameters", "PartitionParametersTypedDict": ".partition_parameters", "Strategy": ".partition_parameters", - "VLMModel": ".partition_parameters", "VLMModelProvider": ".partition_parameters", "PineconeDestinationConnectorConfig": ".pineconedestinationconnectorconfig", "PineconeDestinationConnectorConfigTypedDict": ".pineconedestinationconnectorconfig", @@ -852,6 +888,12 @@ "SourceConnectorInformationConfigTypedDict": ".sourceconnectorinformation", "SourceConnectorInformationTypedDict": ".sourceconnectorinformation", "SourceConnectorType": ".sourceconnectortype", + "TemplateDetail": ".templatedetail", + "TemplateDetailTypedDict": ".templatedetail", + "TemplateListItem": ".templatelistitem", + "TemplateListItemTypedDict": ".templatelistitem", + "TemplateNode": ".templatenode", + "TemplateNodeTypedDict": ".templatenode", "UpdateDestinationConnector": ".updatedestinationconnector", "UpdateDestinationConnectorConfig": ".updatedestinationconnector", "UpdateDestinationConnectorConfigTypedDict": ".updatedestinationconnector", diff --git a/src/unstructured_client/models/shared/astradbconnectorconfig.py b/src/unstructured_client/models/shared/astradbconnectorconfig.py index 64c3f17c..8611eb28 100644 --- a/src/unstructured_client/models/shared/astradbconnectorconfig.py +++ b/src/unstructured_client/models/shared/astradbconnectorconfig.py @@ -2,6 +2,7 @@ from __future__ import annotations from pydantic import model_serializer +from typing import Optional from typing_extensions import NotRequired, TypedDict from unstructured_client.types import ( BaseModel, @@ -17,6 +18,7 @@ class AstraDBConnectorConfigTypedDict(TypedDict): batch_size: int collection_name: str token: str + binary_encode_vectors: NotRequired[bool] keyspace: NotRequired[Nullable[str]] @@ -29,11 +31,13 @@ class AstraDBConnectorConfig(BaseModel): token: str + binary_encode_vectors: Optional[bool] = True + keyspace: OptionalNullable[str] = UNSET @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["keyspace"] + optional_fields = ["binary_encode_vectors", "keyspace"] nullable_fields = ["keyspace"] null_default_fields = [] diff --git a/src/unstructured_client/models/shared/astradbconnectorconfiginput.py b/src/unstructured_client/models/shared/astradbconnectorconfiginput.py index a22dfe29..8a564382 100644 --- a/src/unstructured_client/models/shared/astradbconnectorconfiginput.py +++ b/src/unstructured_client/models/shared/astradbconnectorconfiginput.py @@ -18,6 +18,7 @@ class AstraDBConnectorConfigInputTypedDict(TypedDict): collection_name: str token: str batch_size: NotRequired[int] + binary_encode_vectors: NotRequired[bool] flatten_metadata: NotRequired[bool] keyspace: NotRequired[Nullable[str]] @@ -31,13 +32,20 @@ class AstraDBConnectorConfigInput(BaseModel): batch_size: Optional[int] = 20 + binary_encode_vectors: Optional[bool] = True + flatten_metadata: Optional[bool] = False keyspace: OptionalNullable[str] = UNSET @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["batch_size", "flatten_metadata", "keyspace"] + optional_fields = [ + "batch_size", + "binary_encode_vectors", + "flatten_metadata", + "keyspace", + ] nullable_fields = ["keyspace"] null_default_fields = [] diff --git 
a/src/unstructured_client/models/shared/body_create_job.py b/src/unstructured_client/models/shared/body_create_job.py new file mode 100644 index 00000000..129293ff --- /dev/null +++ b/src/unstructured_client/models/shared/body_create_job.py @@ -0,0 +1,84 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import io +import pydantic +from pydantic import model_serializer +from typing import IO, List, Optional, Union +from typing_extensions import Annotated, NotRequired, TypedDict +from unstructured_client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) +from unstructured_client.utils import FieldMetadata, MultipartFormMetadata + + +class InputFilesTypedDict(TypedDict): + content: Union[bytes, IO[bytes], io.BufferedReader] + file_name: str + content_type: NotRequired[str] + + +class InputFiles(BaseModel): + content: Annotated[ + Union[bytes, IO[bytes], io.BufferedReader], + pydantic.Field(alias=""), + FieldMetadata(multipart=MultipartFormMetadata(content=True)), + ] + + file_name: Annotated[ + str, pydantic.Field(alias="fileName"), FieldMetadata(multipart=True) + ] + + content_type: Annotated[ + Optional[str], + pydantic.Field(alias="Content-Type"), + FieldMetadata(multipart=True), + ] = None + + +class BodyCreateJobTypedDict(TypedDict): + request_data: str + input_files: NotRequired[Nullable[List[InputFilesTypedDict]]] + + +class BodyCreateJob(BaseModel): + request_data: Annotated[str, FieldMetadata(multipart=True)] + + input_files: Annotated[ + OptionalNullable[List[InputFiles]], + FieldMetadata(multipart=MultipartFormMetadata(file=True)), + ] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["input_files"] + nullable_fields = ["input_files"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + return m diff --git a/src/unstructured_client/models/shared/body_run_workflow.py b/src/unstructured_client/models/shared/body_run_workflow.py index 4eb6866d..4bff19a4 100644 --- a/src/unstructured_client/models/shared/body_run_workflow.py +++ b/src/unstructured_client/models/shared/body_run_workflow.py @@ -16,13 +16,13 @@ from unstructured_client.utils import FieldMetadata, MultipartFormMetadata -class InputFilesTypedDict(TypedDict): +class BodyRunWorkflowInputFilesTypedDict(TypedDict): content: Union[bytes, IO[bytes], io.BufferedReader] file_name: str content_type: NotRequired[str] -class InputFiles(BaseModel): +class BodyRunWorkflowInputFiles(BaseModel): content: Annotated[ Union[bytes, IO[bytes], io.BufferedReader], pydantic.Field(alias=""), @@ -41,12 +41,12 @@ class InputFiles(BaseModel): class BodyRunWorkflowTypedDict(TypedDict): - input_files: NotRequired[Nullable[List[InputFilesTypedDict]]] + input_files: NotRequired[Nullable[List[BodyRunWorkflowInputFilesTypedDict]]] class BodyRunWorkflow(BaseModel): input_files: Annotated[ - OptionalNullable[List[InputFiles]], + OptionalNullable[List[BodyRunWorkflowInputFiles]], 
FieldMetadata(multipart=MultipartFormMetadata(file=True)), ] = UNSET diff --git a/src/unstructured_client/models/shared/createdestinationconnector.py b/src/unstructured_client/models/shared/createdestinationconnector.py index 3275fd05..a59ab5df 100644 --- a/src/unstructured_client/models/shared/createdestinationconnector.py +++ b/src/unstructured_client/models/shared/createdestinationconnector.py @@ -9,6 +9,10 @@ AzureAISearchConnectorConfigInput, AzureAISearchConnectorConfigInputTypedDict, ) +from .azuredestinationconnectorconfiginput import ( + AzureDestinationConnectorConfigInput, + AzureDestinationConnectorConfigInputTypedDict, +) from .couchbasedestinationconnectorconfiginput import ( CouchbaseDestinationConnectorConfigInput, CouchbaseDestinationConnectorConfigInputTypedDict, @@ -97,22 +101,23 @@ "ConfigTypedDict", Union[ GCSDestinationConnectorConfigInputTypedDict, - ElasticsearchConnectorConfigInputTypedDict, - AzureAISearchConnectorConfigInputTypedDict, WeaviateDestinationConnectorConfigInputTypedDict, + AzureAISearchConnectorConfigInputTypedDict, + ElasticsearchConnectorConfigInputTypedDict, MongoDBConnectorConfigInputTypedDict, - DeltaTableConnectorConfigInputTypedDict, QdrantCloudDestinationConnectorConfigInputTypedDict, + DeltaTableConnectorConfigInputTypedDict, PineconeDestinationConnectorConfigInputTypedDict, Neo4jDestinationConnectorConfigInputTypedDict, - OneDriveDestinationConnectorConfigInputTypedDict, + AzureDestinationConnectorConfigInputTypedDict, S3DestinationConnectorConfigInputTypedDict, - AstraDBConnectorConfigInputTypedDict, + OneDriveDestinationConnectorConfigInputTypedDict, PostgresDestinationConnectorConfigInputTypedDict, - DatabricksVolumesConnectorConfigInputTypedDict, MilvusDestinationConnectorConfigInputTypedDict, KafkaCloudDestinationConnectorConfigInputTypedDict, + DatabricksVolumesConnectorConfigInputTypedDict, CouchbaseDestinationConnectorConfigInputTypedDict, + AstraDBConnectorConfigInputTypedDict, RedisDestinationConnectorConfigInputTypedDict, DatabricksVDTDestinationConnectorConfigInputTypedDict, SnowflakeDestinationConnectorConfigInputTypedDict, @@ -126,22 +131,23 @@ "Config", Union[ GCSDestinationConnectorConfigInput, - ElasticsearchConnectorConfigInput, - AzureAISearchConnectorConfigInput, WeaviateDestinationConnectorConfigInput, + AzureAISearchConnectorConfigInput, + ElasticsearchConnectorConfigInput, MongoDBConnectorConfigInput, - DeltaTableConnectorConfigInput, QdrantCloudDestinationConnectorConfigInput, + DeltaTableConnectorConfigInput, PineconeDestinationConnectorConfigInput, Neo4jDestinationConnectorConfigInput, - OneDriveDestinationConnectorConfigInput, + AzureDestinationConnectorConfigInput, S3DestinationConnectorConfigInput, - AstraDBConnectorConfigInput, + OneDriveDestinationConnectorConfigInput, PostgresDestinationConnectorConfigInput, - DatabricksVolumesConnectorConfigInput, MilvusDestinationConnectorConfigInput, KafkaCloudDestinationConnectorConfigInput, + DatabricksVolumesConnectorConfigInput, CouchbaseDestinationConnectorConfigInput, + AstraDBConnectorConfigInput, RedisDestinationConnectorConfigInput, DatabricksVDTDestinationConnectorConfigInput, SnowflakeDestinationConnectorConfigInput, diff --git a/src/unstructured_client/models/shared/createworkflow.py b/src/unstructured_client/models/shared/createworkflow.py index 782aa0b1..741f838d 100644 --- a/src/unstructured_client/models/shared/createworkflow.py +++ b/src/unstructured_client/models/shared/createworkflow.py @@ -37,6 +37,7 @@ class CreateWorkflowTypedDict(TypedDict): 
reprocess_all: NotRequired[Nullable[bool]] schedule: NotRequired[Nullable[Schedule]] source_id: NotRequired[Nullable[str]] + template_id: NotRequired[Nullable[str]] workflow_nodes: NotRequired[Nullable[List[WorkflowNodeTypedDict]]] @@ -53,6 +54,8 @@ class CreateWorkflow(BaseModel): source_id: OptionalNullable[str] = UNSET + template_id: OptionalNullable[str] = UNSET + workflow_nodes: OptionalNullable[List[WorkflowNode]] = UNSET @model_serializer(mode="wrap") @@ -62,6 +65,7 @@ def serialize_model(self, handler): "reprocess_all", "schedule", "source_id", + "template_id", "workflow_nodes", ] nullable_fields = [ @@ -69,6 +73,7 @@ def serialize_model(self, handler): "reprocess_all", "schedule", "source_id", + "template_id", "workflow_nodes", ] null_default_fields = [] diff --git a/src/unstructured_client/models/shared/databricksvdtdestinationconnectorconfiginput.py b/src/unstructured_client/models/shared/databricksvdtdestinationconnectorconfiginput.py index 5ab64381..8ff672e0 100644 --- a/src/unstructured_client/models/shared/databricksvdtdestinationconnectorconfiginput.py +++ b/src/unstructured_client/models/shared/databricksvdtdestinationconnectorconfiginput.py @@ -43,7 +43,7 @@ class DatabricksVDTDestinationConnectorConfigInput(BaseModel): database: Optional[str] = "default" - schema_: Annotated[Optional[str], pydantic.Field(alias="schema")] = None + schema_: Annotated[Optional[str], pydantic.Field(alias="schema")] = "default" table_name: OptionalNullable[str] = UNSET diff --git a/src/unstructured_client/models/shared/databricksvolumesconnectorconfiginput.py b/src/unstructured_client/models/shared/databricksvolumesconnectorconfiginput.py index f516bde4..a056900b 100644 --- a/src/unstructured_client/models/shared/databricksvolumesconnectorconfiginput.py +++ b/src/unstructured_client/models/shared/databricksvolumesconnectorconfiginput.py @@ -30,4 +30,4 @@ class DatabricksVolumesConnectorConfigInput(BaseModel): volume_path: str - schema_: Annotated[Optional[str], pydantic.Field(alias="schema")] = None + schema_: Annotated[Optional[str], pydantic.Field(alias="schema")] = "default" diff --git a/src/unstructured_client/models/shared/destinationconnectorinformation.py b/src/unstructured_client/models/shared/destinationconnectorinformation.py index 0e912623..11237f6e 100644 --- a/src/unstructured_client/models/shared/destinationconnectorinformation.py +++ b/src/unstructured_client/models/shared/destinationconnectorinformation.py @@ -9,6 +9,10 @@ AzureAISearchConnectorConfig, AzureAISearchConnectorConfigTypedDict, ) +from .azuredestinationconnectorconfig import ( + AzureDestinationConnectorConfig, + AzureDestinationConnectorConfigTypedDict, +) from .couchbasedestinationconnectorconfig import ( CouchbaseDestinationConnectorConfig, CouchbaseDestinationConnectorConfigTypedDict, @@ -106,20 +110,21 @@ Union[ GCSDestinationConnectorConfigTypedDict, ElasticsearchConnectorConfigTypedDict, + MongoDBConnectorConfigTypedDict, AzureAISearchConnectorConfigTypedDict, WeaviateDestinationConnectorConfigTypedDict, - MongoDBConnectorConfigTypedDict, DeltaTableConnectorConfigTypedDict, QdrantCloudDestinationConnectorConfigTypedDict, PineconeDestinationConnectorConfigTypedDict, - AstraDBConnectorConfigTypedDict, + AzureDestinationConnectorConfigTypedDict, Neo4jDestinationConnectorConfigTypedDict, OneDriveDestinationConnectorConfigTypedDict, S3DestinationConnectorConfigTypedDict, + AstraDBConnectorConfigTypedDict, PostgresDestinationConnectorConfigTypedDict, - DatabricksVolumesConnectorConfigTypedDict, 
MilvusDestinationConnectorConfigTypedDict, KafkaCloudDestinationConnectorConfigTypedDict, + DatabricksVolumesConnectorConfigTypedDict, CouchbaseDestinationConnectorConfigTypedDict, RedisDestinationConnectorConfigTypedDict, DatabricksVDTDestinationConnectorConfigTypedDict, @@ -135,20 +140,21 @@ Union[ GCSDestinationConnectorConfig, ElasticsearchConnectorConfig, + MongoDBConnectorConfig, AzureAISearchConnectorConfig, WeaviateDestinationConnectorConfig, - MongoDBConnectorConfig, DeltaTableConnectorConfig, QdrantCloudDestinationConnectorConfig, PineconeDestinationConnectorConfig, - AstraDBConnectorConfig, + AzureDestinationConnectorConfig, Neo4jDestinationConnectorConfig, OneDriveDestinationConnectorConfig, S3DestinationConnectorConfig, + AstraDBConnectorConfig, PostgresDestinationConnectorConfig, - DatabricksVolumesConnectorConfig, MilvusDestinationConnectorConfig, KafkaCloudDestinationConnectorConfig, + DatabricksVolumesConnectorConfig, CouchbaseDestinationConnectorConfig, RedisDestinationConnectorConfig, DatabricksVDTDestinationConnectorConfig, diff --git a/src/unstructured_client/models/shared/destinationconnectortype.py b/src/unstructured_client/models/shared/destinationconnectortype.py index 6de4b97c..d463db75 100644 --- a/src/unstructured_client/models/shared/destinationconnectortype.py +++ b/src/unstructured_client/models/shared/destinationconnectortype.py @@ -6,6 +6,7 @@ class DestinationConnectorType(str, Enum, metaclass=utils.OpenEnumMeta): + AZURE = "azure" ASTRADB = "astradb" AZURE_AI_SEARCH = "azure_ai_search" COUCHBASE = "couchbase" diff --git a/src/unstructured_client/models/shared/nodefilemetadata.py b/src/unstructured_client/models/shared/nodefilemetadata.py index a5bddde1..41816905 100644 --- a/src/unstructured_client/models/shared/nodefilemetadata.py +++ b/src/unstructured_client/models/shared/nodefilemetadata.py @@ -8,9 +8,15 @@ class NodeFileMetadataTypedDict(TypedDict): file_id: str node_id: str + node_subtype: str + node_type: str class NodeFileMetadata(BaseModel): file_id: str node_id: str + + node_subtype: str + + node_type: str diff --git a/src/unstructured_client/models/shared/partition_parameters.py b/src/unstructured_client/models/shared/partition_parameters.py index e50ade3a..0098c0fe 100644 --- a/src/unstructured_client/models/shared/partition_parameters.py +++ b/src/unstructured_client/models/shared/partition_parameters.py @@ -64,35 +64,6 @@ class Strategy(str, Enum, metaclass=utils.OpenEnumMeta): VLM = "vlm" -class VLMModel(str, Enum, metaclass=utils.OpenEnumMeta): - r"""The VLM Model to use.""" - - CLAUDE_3_5_SONNET_20241022 = "claude-3-5-sonnet-20241022" - CLAUDE_3_7_SONNET_20250219 = "claude-3-7-sonnet-20250219" - GPT_4O = "gpt-4o" - GEMINI_1_5_PRO = "gemini-1.5-pro" - US_AMAZON_NOVA_PRO_V1_0 = "us.amazon.nova-pro-v1:0" - US_AMAZON_NOVA_LITE_V1_0 = "us.amazon.nova-lite-v1:0" - US_ANTHROPIC_CLAUDE_3_7_SONNET_20250219_V1_0 = ( - "us.anthropic.claude-3-7-sonnet-20250219-v1:0" - ) - US_ANTHROPIC_CLAUDE_3_5_SONNET_20241022_V2_0 = ( - "us.anthropic.claude-3-5-sonnet-20241022-v2:0" - ) - US_ANTHROPIC_CLAUDE_3_OPUS_20240229_V1_0 = ( - "us.anthropic.claude-3-opus-20240229-v1:0" - ) - US_ANTHROPIC_CLAUDE_3_HAIKU_20240307_V1_0 = ( - "us.anthropic.claude-3-haiku-20240307-v1:0" - ) - US_ANTHROPIC_CLAUDE_3_SONNET_20240229_V1_0 = ( - "us.anthropic.claude-3-sonnet-20240229-v1:0" - ) - US_META_LLAMA3_2_90B_INSTRUCT_V1_0 = "us.meta.llama3-2-90b-instruct-v1:0" - US_META_LLAMA3_2_11B_INSTRUCT_V1_0 = "us.meta.llama3-2-11b-instruct-v1:0" - GEMINI_2_0_FLASH_001 = 
"gemini-2.0-flash-001" - - class VLMModelProvider(str, Enum, metaclass=utils.OpenEnumMeta): r"""The VLM Model provider to use.""" @@ -116,6 +87,8 @@ class PartitionParametersTypedDict(TypedDict): r"""A hint about the content type to use (such as text/markdown), when there are problems processing a specific file. This value is a MIME type in the format type/subtype.""" coordinates: NotRequired[bool] r"""If `True`, return coordinates for each element extracted via OCR. Default: `False`""" + do_not_break_similarity_on_footer_header: NotRequired[bool] + r"""When `True`, footer, header, and page number are always considered similar to the text before them for chunk by similarity method. This allows chunk by similarity to connect contents across page better.""" encoding: NotRequired[Nullable[str]] r"""The encoding method used to decode the text input. Default: utf-8""" extract_image_block_types: NotRequired[List[str]] @@ -180,7 +153,7 @@ class PartitionParametersTypedDict(TypedDict): r"""The OCR agent to use for table ocr inference.""" unique_element_ids: NotRequired[bool] r"""When `True`, assign UUIDs to element IDs, which guarantees their uniqueness (useful when using them as primary keys in database). Otherwise a SHA-256 of element text is used. Default: `False`""" - vlm_model: NotRequired[VLMModel] + vlm_model: NotRequired[str] r"""The VLM Model to use.""" vlm_model_provider: NotRequired[VLMModelProvider] r"""The VLM Model provider to use.""" @@ -208,6 +181,11 @@ class PartitionParameters(BaseModel): coordinates: Annotated[Optional[bool], FieldMetadata(multipart=True)] = False r"""If `True`, return coordinates for each element extracted via OCR. Default: `False`""" + do_not_break_similarity_on_footer_header: Annotated[ + Optional[bool], FieldMetadata(multipart=True) + ] = False + r"""When `True`, footer, header, and page number are always considered similar to the text before them for chunk by similarity method. This allows chunk by similarity to connect contents across page better.""" + encoding: Annotated[OptionalNullable[str], FieldMetadata(multipart=True)] = None r"""The encoding method used to decode the text input. Default: utf-8""" @@ -352,10 +330,7 @@ class PartitionParameters(BaseModel): unique_element_ids: Annotated[Optional[bool], FieldMetadata(multipart=True)] = False r"""When `True`, assign UUIDs to element IDs, which guarantees their uniqueness (useful when using them as primary keys in database). Otherwise a SHA-256 of element text is used. 
Default: `False`""" - vlm_model: Annotated[ - Annotated[Optional[VLMModel], PlainValidator(validate_open_enum(False))], - FieldMetadata(multipart=True), - ] = None + vlm_model: Annotated[Optional[str], FieldMetadata(multipart=True)] = None r"""The VLM Model to use.""" vlm_model_provider: Annotated[ @@ -376,6 +351,7 @@ def serialize_model(self, handler): "combine_under_n_chars", "content_type", "coordinates", + "do_not_break_similarity_on_footer_header", "encoding", "extract_image_block_types", "gz_uncompressed_content_type", diff --git a/src/unstructured_client/models/shared/snowflakedestinationconnectorconfig.py b/src/unstructured_client/models/shared/snowflakedestinationconnectorconfig.py index 47e74cd5..a1798144 100644 --- a/src/unstructured_client/models/shared/snowflakedestinationconnectorconfig.py +++ b/src/unstructured_client/models/shared/snowflakedestinationconnectorconfig.py @@ -20,11 +20,11 @@ class SnowflakeDestinationConnectorConfigTypedDict(TypedDict): host: str password: str role: str + schema_: str user: str batch_size: NotRequired[int] port: NotRequired[int] record_id_key: NotRequired[Nullable[str]] - schema_: NotRequired[str] table_name: NotRequired[str] @@ -39,6 +39,8 @@ class SnowflakeDestinationConnectorConfig(BaseModel): role: str + schema_: Annotated[str, pydantic.Field(alias="schema")] + user: str batch_size: Optional[int] = 50 @@ -47,19 +49,11 @@ class SnowflakeDestinationConnectorConfig(BaseModel): record_id_key: OptionalNullable[str] = UNSET - schema_: Annotated[Optional[str], pydantic.Field(alias="schema")] = None - table_name: Optional[str] = "elements" @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "batch_size", - "port", - "record_id_key", - "schema", - "table_name", - ] + optional_fields = ["batch_size", "port", "record_id_key", "table_name"] nullable_fields = ["record_id_key"] null_default_fields = [] diff --git a/src/unstructured_client/models/shared/snowflakedestinationconnectorconfiginput.py b/src/unstructured_client/models/shared/snowflakedestinationconnectorconfiginput.py index 3c9040c9..105f197c 100644 --- a/src/unstructured_client/models/shared/snowflakedestinationconnectorconfiginput.py +++ b/src/unstructured_client/models/shared/snowflakedestinationconnectorconfiginput.py @@ -20,11 +20,11 @@ class SnowflakeDestinationConnectorConfigInputTypedDict(TypedDict): host: str password: str role: str + schema_: str user: str batch_size: NotRequired[int] port: NotRequired[int] record_id_key: NotRequired[Nullable[str]] - schema_: NotRequired[str] table_name: NotRequired[str] @@ -39,6 +39,8 @@ class SnowflakeDestinationConnectorConfigInput(BaseModel): role: str + schema_: Annotated[str, pydantic.Field(alias="schema")] + user: str batch_size: Optional[int] = 50 @@ -47,19 +49,11 @@ class SnowflakeDestinationConnectorConfigInput(BaseModel): record_id_key: OptionalNullable[str] = UNSET - schema_: Annotated[Optional[str], pydantic.Field(alias="schema")] = None - table_name: Optional[str] = "elements" @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = [ - "batch_size", - "port", - "record_id_key", - "schema", - "table_name", - ] + optional_fields = ["batch_size", "port", "record_id_key", "table_name"] nullable_fields = ["record_id_key"] null_default_fields = [] diff --git a/src/unstructured_client/models/shared/snowflakesourceconnectorconfig.py b/src/unstructured_client/models/shared/snowflakesourceconnectorconfig.py index 0fafc35f..e7aa07a5 100644 --- 
a/src/unstructured_client/models/shared/snowflakesourceconnectorconfig.py +++ b/src/unstructured_client/models/shared/snowflakesourceconnectorconfig.py @@ -21,12 +21,12 @@ class SnowflakeSourceConnectorConfigTypedDict(TypedDict): id_column: str password: str role: str + schema_: str table_name: str user: str batch_size: NotRequired[int] fields: NotRequired[Nullable[List[str]]] port: NotRequired[int] - schema_: NotRequired[str] class SnowflakeSourceConnectorConfig(BaseModel): @@ -42,6 +42,8 @@ class SnowflakeSourceConnectorConfig(BaseModel): role: str + schema_: Annotated[str, pydantic.Field(alias="schema")] + table_name: str user: str @@ -52,11 +54,9 @@ class SnowflakeSourceConnectorConfig(BaseModel): port: Optional[int] = 443 - schema_: Annotated[Optional[str], pydantic.Field(alias="schema")] = None - @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["batch_size", "fields", "port", "schema"] + optional_fields = ["batch_size", "fields", "port"] nullable_fields = ["fields"] null_default_fields = [] diff --git a/src/unstructured_client/models/shared/snowflakesourceconnectorconfiginput.py b/src/unstructured_client/models/shared/snowflakesourceconnectorconfiginput.py index 792d8462..e801d514 100644 --- a/src/unstructured_client/models/shared/snowflakesourceconnectorconfiginput.py +++ b/src/unstructured_client/models/shared/snowflakesourceconnectorconfiginput.py @@ -21,12 +21,12 @@ class SnowflakeSourceConnectorConfigInputTypedDict(TypedDict): id_column: str password: str role: str + schema_: str table_name: str user: str batch_size: NotRequired[int] fields: NotRequired[Nullable[List[str]]] port: NotRequired[int] - schema_: NotRequired[str] class SnowflakeSourceConnectorConfigInput(BaseModel): @@ -42,6 +42,8 @@ class SnowflakeSourceConnectorConfigInput(BaseModel): role: str + schema_: Annotated[str, pydantic.Field(alias="schema")] + table_name: str user: str @@ -52,11 +54,9 @@ class SnowflakeSourceConnectorConfigInput(BaseModel): port: Optional[int] = 443 - schema_: Annotated[Optional[str], pydantic.Field(alias="schema")] = None - @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["batch_size", "fields", "port", "schema"] + optional_fields = ["batch_size", "fields", "port"] nullable_fields = ["fields"] null_default_fields = [] diff --git a/src/unstructured_client/models/shared/templatedetail.py b/src/unstructured_client/models/shared/templatedetail.py new file mode 100644 index 00000000..bb770eed --- /dev/null +++ b/src/unstructured_client/models/shared/templatedetail.py @@ -0,0 +1,34 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from .templatenode import TemplateNode, TemplateNodeTypedDict +from typing import List +from typing_extensions import TypedDict +from unstructured_client.types import BaseModel + + +class TemplateDetailTypedDict(TypedDict): + r"""Full template details including nodes.""" + + description: str + id: str + last_updated: str + name: str + nodes: List[TemplateNodeTypedDict] + version: str + + +class TemplateDetail(BaseModel): + r"""Full template details including nodes.""" + + description: str + + id: str + + last_updated: str + + name: str + + nodes: List[TemplateNode] + + version: str diff --git a/src/unstructured_client/models/shared/templatelistitem.py b/src/unstructured_client/models/shared/templatelistitem.py new file mode 100644 index 00000000..4c182ee2 --- /dev/null +++ b/src/unstructured_client/models/shared/templatelistitem.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from typing_extensions import TypedDict +from unstructured_client.types import BaseModel + + +class TemplateListItemTypedDict(TypedDict): + r"""Template metadata for list responses.""" + + description: str + id: str + last_updated: str + name: str + version: str + + +class TemplateListItem(BaseModel): + r"""Template metadata for list responses.""" + + description: str + + id: str + + last_updated: str + + name: str + + version: str diff --git a/src/unstructured_client/models/shared/templatenode.py b/src/unstructured_client/models/shared/templatenode.py new file mode 100644 index 00000000..25a4b413 --- /dev/null +++ b/src/unstructured_client/models/shared/templatenode.py @@ -0,0 +1,67 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from pydantic import model_serializer +from typing import Any, Dict +from typing_extensions import NotRequired, TypedDict +from unstructured_client.types import ( + BaseModel, + Nullable, + OptionalNullable, + UNSET, + UNSET_SENTINEL, +) + + +class TemplateNodeTypedDict(TypedDict): + r"""A node in a template DAG.""" + + id: str + name: str + subtype: str + type: str + settings: NotRequired[Nullable[Dict[str, Any]]] + + +class TemplateNode(BaseModel): + r"""A node in a template DAG.""" + + id: str + + name: str + + subtype: str + + type: str + + settings: OptionalNullable[Dict[str, Any]] = UNSET + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + optional_fields = ["settings"] + nullable_fields = ["settings"] + null_default_fields = [] + + serialized = handler(self) + + m = {} + + for n, f in type(self).model_fields.items(): + k = f.alias or n + val = serialized.get(k) + serialized.pop(k, None) + + optional_nullable = k in optional_fields and k in nullable_fields + is_set = ( + self.__pydantic_fields_set__.intersection({n}) + or k in null_default_fields + ) # pylint: disable=no-member + + if val is not None and val != UNSET_SENTINEL: + m[k] = val + elif val != UNSET_SENTINEL and ( + not k in optional_fields or (optional_nullable and is_set) + ): + m[k] = val + + return m diff --git a/src/unstructured_client/models/shared/updatedestinationconnector.py b/src/unstructured_client/models/shared/updatedestinationconnector.py index e10814f6..71b95975 100644 --- a/src/unstructured_client/models/shared/updatedestinationconnector.py +++ b/src/unstructured_client/models/shared/updatedestinationconnector.py @@ -9,6 +9,10 @@ AzureAISearchConnectorConfigInput, AzureAISearchConnectorConfigInputTypedDict, ) +from .azuredestinationconnectorconfiginput import ( + AzureDestinationConnectorConfigInput, + AzureDestinationConnectorConfigInputTypedDict, +) from .couchbasedestinationconnectorconfiginput import ( CouchbaseDestinationConnectorConfigInput, CouchbaseDestinationConnectorConfigInputTypedDict, @@ -94,22 +98,23 @@ "UpdateDestinationConnectorConfigTypedDict", Union[ GCSDestinationConnectorConfigInputTypedDict, - ElasticsearchConnectorConfigInputTypedDict, - AzureAISearchConnectorConfigInputTypedDict, WeaviateDestinationConnectorConfigInputTypedDict, + AzureAISearchConnectorConfigInputTypedDict, + ElasticsearchConnectorConfigInputTypedDict, MongoDBConnectorConfigInputTypedDict, - DeltaTableConnectorConfigInputTypedDict, QdrantCloudDestinationConnectorConfigInputTypedDict, + DeltaTableConnectorConfigInputTypedDict, PineconeDestinationConnectorConfigInputTypedDict, Neo4jDestinationConnectorConfigInputTypedDict, - OneDriveDestinationConnectorConfigInputTypedDict, + AzureDestinationConnectorConfigInputTypedDict, S3DestinationConnectorConfigInputTypedDict, - AstraDBConnectorConfigInputTypedDict, + OneDriveDestinationConnectorConfigInputTypedDict, PostgresDestinationConnectorConfigInputTypedDict, - DatabricksVolumesConnectorConfigInputTypedDict, MilvusDestinationConnectorConfigInputTypedDict, KafkaCloudDestinationConnectorConfigInputTypedDict, + DatabricksVolumesConnectorConfigInputTypedDict, CouchbaseDestinationConnectorConfigInputTypedDict, + AstraDBConnectorConfigInputTypedDict, RedisDestinationConnectorConfigInputTypedDict, DatabricksVDTDestinationConnectorConfigInputTypedDict, SnowflakeDestinationConnectorConfigInputTypedDict, @@ -123,22 +128,23 @@ "UpdateDestinationConnectorConfig", Union[ GCSDestinationConnectorConfigInput, - 
ElasticsearchConnectorConfigInput, - AzureAISearchConnectorConfigInput, WeaviateDestinationConnectorConfigInput, + AzureAISearchConnectorConfigInput, + ElasticsearchConnectorConfigInput, MongoDBConnectorConfigInput, - DeltaTableConnectorConfigInput, QdrantCloudDestinationConnectorConfigInput, + DeltaTableConnectorConfigInput, PineconeDestinationConnectorConfigInput, Neo4jDestinationConnectorConfigInput, - OneDriveDestinationConnectorConfigInput, + AzureDestinationConnectorConfigInput, S3DestinationConnectorConfigInput, - AstraDBConnectorConfigInput, + OneDriveDestinationConnectorConfigInput, PostgresDestinationConnectorConfigInput, - DatabricksVolumesConnectorConfigInput, MilvusDestinationConnectorConfigInput, KafkaCloudDestinationConnectorConfigInput, + DatabricksVolumesConnectorConfigInput, CouchbaseDestinationConnectorConfigInput, + AstraDBConnectorConfigInput, RedisDestinationConnectorConfigInput, DatabricksVDTDestinationConnectorConfigInput, SnowflakeDestinationConnectorConfigInput, diff --git a/src/unstructured_client/models/shared/updateworkflow.py b/src/unstructured_client/models/shared/updateworkflow.py index 1198f32d..3f474988 100644 --- a/src/unstructured_client/models/shared/updateworkflow.py +++ b/src/unstructured_client/models/shared/updateworkflow.py @@ -36,6 +36,7 @@ class UpdateWorkflowTypedDict(TypedDict): reprocess_all: NotRequired[Nullable[bool]] schedule: NotRequired[Nullable[UpdateWorkflowSchedule]] source_id: NotRequired[Nullable[str]] + template_id: NotRequired[Nullable[str]] workflow_nodes: NotRequired[Nullable[List[WorkflowNodeTypedDict]]] workflow_type: NotRequired[Nullable[WorkflowType]] @@ -51,6 +52,8 @@ class UpdateWorkflow(BaseModel): source_id: OptionalNullable[str] = UNSET + template_id: OptionalNullable[str] = UNSET + workflow_nodes: OptionalNullable[List[WorkflowNode]] = UNSET workflow_type: OptionalNullable[WorkflowType] = UNSET @@ -63,6 +66,7 @@ def serialize_model(self, handler): "reprocess_all", "schedule", "source_id", + "template_id", "workflow_nodes", "workflow_type", ] @@ -72,6 +76,7 @@ def serialize_model(self, handler): "reprocess_all", "schedule", "source_id", + "template_id", "workflow_nodes", "workflow_type", ] diff --git a/src/unstructured_client/models/shared/workflowinformation.py b/src/unstructured_client/models/shared/workflowinformation.py index 599ce3f2..2025fa84 100644 --- a/src/unstructured_client/models/shared/workflowinformation.py +++ b/src/unstructured_client/models/shared/workflowinformation.py @@ -7,7 +7,7 @@ from .workflowtype import WorkflowType from datetime import datetime from pydantic import model_serializer -from typing import List +from typing import List, Optional from typing_extensions import NotRequired, TypedDict from unstructured_client.types import ( BaseModel, @@ -26,7 +26,7 @@ class WorkflowInformationTypedDict(TypedDict): sources: List[str] status: WorkflowState workflow_nodes: List[WorkflowNodeTypedDict] - reprocess_all: NotRequired[Nullable[bool]] + reprocess_all: NotRequired[bool] schedule: NotRequired[Nullable[WorkflowScheduleTypedDict]] updated_at: NotRequired[Nullable[datetime]] workflow_type: NotRequired[Nullable[WorkflowType]] @@ -47,7 +47,7 @@ class WorkflowInformation(BaseModel): workflow_nodes: List[WorkflowNode] - reprocess_all: OptionalNullable[bool] = UNSET + reprocess_all: Optional[bool] = False schedule: OptionalNullable[WorkflowSchedule] = UNSET @@ -58,7 +58,7 @@ class WorkflowInformation(BaseModel): @model_serializer(mode="wrap") def serialize_model(self, handler): optional_fields = 
["reprocess_all", "schedule", "updated_at", "workflow_type"] - nullable_fields = ["reprocess_all", "schedule", "updated_at", "workflow_type"] + nullable_fields = ["schedule", "updated_at", "workflow_type"] null_default_fields = [] serialized = handler(self) diff --git a/src/unstructured_client/models/shared/workflowjobtype.py b/src/unstructured_client/models/shared/workflowjobtype.py index 1458e867..08d201c6 100644 --- a/src/unstructured_client/models/shared/workflowjobtype.py +++ b/src/unstructured_client/models/shared/workflowjobtype.py @@ -8,3 +8,4 @@ class WorkflowJobType(str, Enum): EPHEMERAL = "ephemeral" PERSISTENT = "persistent" SCHEDULED = "scheduled" + TEMPLATE = "template" diff --git a/src/unstructured_client/sdk.py b/src/unstructured_client/sdk.py index 014bb146..3671fa6e 100644 --- a/src/unstructured_client/sdk.py +++ b/src/unstructured_client/sdk.py @@ -19,6 +19,7 @@ from unstructured_client.general import General from unstructured_client.jobs import Jobs from unstructured_client.sources import Sources + from unstructured_client.templates import Templates from unstructured_client.workflows import Workflows @@ -26,12 +27,14 @@ class UnstructuredClient(BaseSDK): destinations: "Destinations" jobs: "Jobs" sources: "Sources" + templates: "Templates" workflows: "Workflows" general: "General" _sub_sdk_map = { "destinations": ("unstructured_client.destinations", "Destinations"), "jobs": ("unstructured_client.jobs", "Jobs"), "sources": ("unstructured_client.sources", "Sources"), + "templates": ("unstructured_client.templates", "Templates"), "workflows": ("unstructured_client.workflows", "Workflows"), "general": ("unstructured_client.general", "General"), } diff --git a/src/unstructured_client/templates.py b/src/unstructured_client/templates.py new file mode 100644 index 00000000..e1fc46b8 --- /dev/null +++ b/src/unstructured_client/templates.py @@ -0,0 +1,411 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from .basesdk import BaseSDK +from typing import Any, List, Mapping, Optional, Union, cast +from unstructured_client import utils +from unstructured_client._hooks import HookContext +from unstructured_client.models import errors, operations, shared +from unstructured_client.types import BaseModel, OptionalNullable, UNSET +from unstructured_client.utils.unmarshal_json_response import unmarshal_json_response + + +class Templates(BaseSDK): + def get_template( + self, + *, + request: Union[ + operations.GetTemplateRequest, operations.GetTemplateRequestTypedDict + ], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> operations.GetTemplateResponse: + r"""Get Template + + Retrieve detailed information and DAG for a specific template. + + :param request: The request object to send. + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetTemplateRequest) + request = cast(operations.GetTemplateRequest, request) + + req = self._build_request( + method="GET", + path="/api/v1/templates/{template_id}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(3000, 720000, 1.88, 1800000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5xx"]) + + http_res = self.do_request( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + operation_id="get_template", + oauth2_scopes=[], + security_source=self.sdk_configuration.security, + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return operations.GetTemplateResponse( + template_detail=unmarshal_json_response( + Optional[shared.TemplateDetail], http_res + ), + status_code=http_res.status_code, + content_type=http_res.headers.get("Content-Type") or "", + raw_response=http_res, + ) + if utils.match_response(http_res, "422", "application/json"): + response_data = unmarshal_json_response( + errors.HTTPValidationErrorData, http_res + ) + raise errors.HTTPValidationError(response_data, http_res) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + + raise errors.SDKError("Unexpected response received", http_res) + + async def get_template_async( + self, + *, + request: Union[ + operations.GetTemplateRequest, operations.GetTemplateRequestTypedDict + ], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> operations.GetTemplateResponse: + r"""Get Template + + Retrieve detailed information and DAG for a specific template. + + :param request: The request object to send. + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.GetTemplateRequest) + request = cast(operations.GetTemplateRequest, request) + + req = self._build_request_async( + method="GET", + path="/api/v1/templates/{template_id}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(3000, 720000, 1.88, 1800000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5xx"]) + + http_res = await self.do_request_async( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + operation_id="get_template", + oauth2_scopes=[], + security_source=self.sdk_configuration.security, + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return operations.GetTemplateResponse( + template_detail=unmarshal_json_response( + Optional[shared.TemplateDetail], http_res + ), + status_code=http_res.status_code, + content_type=http_res.headers.get("Content-Type") or "", + raw_response=http_res, + ) + if utils.match_response(http_res, "422", "application/json"): + response_data = unmarshal_json_response( + errors.HTTPValidationErrorData, http_res + ) + raise errors.HTTPValidationError(response_data, http_res) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + + raise errors.SDKError("Unexpected response received", http_res) + + def list_templates( + self, + *, + request: Union[ + operations.ListTemplatesRequest, operations.ListTemplatesRequestTypedDict + ], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> operations.ListTemplatesResponse: + r"""List Templates + + Retrieve a list of available templates with their metadata. + + :param request: The request object to send. + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.ListTemplatesRequest) + request = cast(operations.ListTemplatesRequest, request) + + req = self._build_request( + method="GET", + path="/api/v1/templates/", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(3000, 720000, 1.88, 1800000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5xx"]) + + http_res = self.do_request( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + operation_id="list_templates", + oauth2_scopes=[], + security_source=self.sdk_configuration.security, + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return operations.ListTemplatesResponse( + response_list_templates=unmarshal_json_response( + Optional[List[shared.TemplateListItem]], http_res + ), + status_code=http_res.status_code, + content_type=http_res.headers.get("Content-Type") or "", + raw_response=http_res, + ) + if utils.match_response(http_res, "422", "application/json"): + response_data = unmarshal_json_response( + errors.HTTPValidationErrorData, http_res + ) + raise errors.HTTPValidationError(response_data, http_res) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = utils.stream_to_text(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + + raise errors.SDKError("Unexpected response received", http_res) + + async def list_templates_async( + self, + *, + request: Union[ + operations.ListTemplatesRequest, operations.ListTemplatesRequestTypedDict + ], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> operations.ListTemplatesResponse: + r"""List Templates + + Retrieve a list of available templates with their metadata. + + :param request: The request object to send. + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + else: + base_url = self._get_url(base_url, url_variables) + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, operations.ListTemplatesRequest) + request = cast(operations.ListTemplatesRequest, request) + + req = self._build_request_async( + method="GET", + path="/api/v1/templates/", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(3000, 720000, 1.88, 1800000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5xx"]) + + http_res = await self.do_request_async( + hook_ctx=HookContext( + config=self.sdk_configuration, + base_url=base_url or "", + operation_id="list_templates", + oauth2_scopes=[], + security_source=self.sdk_configuration.security, + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + response_data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return operations.ListTemplatesResponse( + response_list_templates=unmarshal_json_response( + Optional[List[shared.TemplateListItem]], http_res + ), + status_code=http_res.status_code, + content_type=http_res.headers.get("Content-Type") or "", + raw_response=http_res, + ) + if utils.match_response(http_res, "422", "application/json"): + response_data = unmarshal_json_response( + errors.HTTPValidationErrorData, http_res + ) + raise errors.HTTPValidationError(response_data, http_res) + if utils.match_response(http_res, "4XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + if utils.match_response(http_res, "5XX", "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise errors.SDKError("API error occurred", http_res, http_res_text) + + raise errors.SDKError("Unexpected response received", http_res) From daa5abf06e5580c89a738a44f3a9b50228624d73 Mon Sep 17 00:00:00 2001 From: Jordan Homan Date: Fri, 12 Dec 2025 17:28:49 -0500 Subject: [PATCH 2/4] re-apply test changes --- _test_contract/platform_api/test_jobs.py | 57 +++++ _test_contract/platform_api/test_templates.py | 141 ++++++++++ .../test_platform_workflow_lifecycle.py | 242 ++++++++++++++++++ pyproject.toml | 2 +- 4 files changed, 441 insertions(+), 1 deletion(-) create mode 100644 _test_contract/platform_api/test_templates.py create mode 100644 _test_unstructured_client/integration/test_platform_workflow_lifecycle.py diff --git a/_test_contract/platform_api/test_jobs.py b/_test_contract/platform_api/test_jobs.py index 7747decc..bfd39508 100644 --- a/_test_contract/platform_api/test_jobs.py +++ b/_test_contract/platform_api/test_jobs.py @@ -165,3 +165,60 @@ def test_cancel_job(httpx_mock, platform_client: UnstructuredClient, platform_ap request = requests[0] assert request.method == "POST" assert request.url == url + + +def test_create_job(httpx_mock, 
platform_client: UnstructuredClient, platform_api_url: str): + import json + + url = f"{platform_api_url}/api/v1/jobs/" + + httpx_mock.add_response( + method="POST", + status_code=200, + headers={"Content-Type": "application/json"}, + json={ + "created_at": "2025-06-22T11:37:21.648Z", + "id": "fcdc4994-eea5-425c-91fa-e03f2bd8030d", + "status": "SCHEDULED", + "runtime": None, + "workflow_id": "16b80fee-64dc-472d-8f26-1d7729b6423d", + "workflow_name": "job-fcdc4994", + "input_file_ids": ["upload-test-file-123"], + "output_node_files": [ + { + "node_id": "93fc2ce8-e7c8-424f-a6aa-41460fc5d35d", + "file_id": "upload-test-file-123", + "node_type": "partition", + "node_subtype": "unstructured_api", + } + ], + "job_type": "template", + }, + url=url, + ) + + # request_data should be a JSON string containing the job creation data + request_data = json.dumps({ + "template_id": "hi_res_partition", + }) + + create_job_response = platform_client.jobs.create_job( + request=operations.CreateJobRequest( + body_create_job=shared.BodyCreateJob( + request_data=request_data, + ) + ) + ) + assert create_job_response.status_code == 200 + + requests = httpx_mock.get_requests() + assert len(requests) == 1 + request = requests[0] + assert request.method == "POST" + assert request.url == url + + job = create_job_response.job_information + assert job.id == "fcdc4994-eea5-425c-91fa-e03f2bd8030d" + assert job.status == "SCHEDULED" + assert job.job_type == "template" + assert job.created_at == datetime.fromisoformat("2025-06-22T11:37:21.648+00:00") diff --git a/_test_contract/platform_api/test_templates.py b/_test_contract/platform_api/test_templates.py new file mode 100644 index 00000000..2875353e --- /dev/null +++ b/_test_contract/platform_api/test_templates.py @@ -0,0 +1,141 @@ +from datetime import datetime + +import pytest + +from unstructured_client import UnstructuredClient +from unstructured_client.models import operations +from unstructured_client.models.errors import SDKError + + +def test_list_templates(httpx_mock, platform_client: UnstructuredClient, platform_api_url: str): + url = f"{platform_api_url}/api/v1/templates/" + + httpx_mock.add_response( + method="GET", + headers={"Content-Type": "application/json"}, + json=[ + { + "id": "hi_res_partition", + "name": "High Resolution Partition", + "description": "Partition documents with high resolution strategy", + "version": "1.0.0", + "nodes": [ + { + "id": "93fc2ce8-e7c8-424f-a6aa-41460fc5d35d", + "name": "partition step", + "type": "partition", + "subtype": "unstructured_api", + } + ], + "edges": [], + }, + { + "id": "hi_res_and_enrichment", + "name": "High Resolution and Enrichment", + "description": "Partition with enrichment", + "version": "1.0.0", + "nodes": [], + "edges": [], + }, + ], + url=url, + ) + + templates_response = platform_client.templates.list_templates( + request=operations.ListTemplatesRequest() + ) + assert templates_response.status_code == 200 + + requests = httpx_mock.get_requests() + assert len(requests) == 1 + request = requests[0] + assert request.method == "GET" + assert request.url == url + + assert "templates" in templates_response.response_list_templates + templates = templates_response.response_list_templates + assert len(templates) == 2 + assert templates[0]["id"] == "hi_res_partition" + assert templates[0]["name"] == "High Resolution Partition" + assert templates[1]["id"] == "hi_res_and_enrichment" + + +def test_get_template(httpx_mock, platform_client: UnstructuredClient, platform_api_url: str): + url = 
f"{platform_api_url}/api/v1/templates/hi_res_partition" + + httpx_mock.add_response( + method="GET", + headers={"Content-Type": "application/json"}, + json={ + "id": "hi_res_partition", + "name": "High Resolution Partition", + "description": "Partition documents with high resolution strategy", + "version": "1.0.0", + "nodes": [ + { + "id": "93fc2ce8-e7c8-424f-a6aa-41460fc5d35d", + "name": "partition step", + "type": "partition", + "subtype": "unstructured_api", + "settings": { + "strategy": "fast", + "include_page_breaks": False, + }, + } + ], + "edges": [ + { + "source_id": "00000000-0000-0000-0000-000000000001-downloader", + "destination_id": "93fc2ce8-e7c8-424f-a6aa-41460fc5d35d", + } + ], + }, + url=url, + ) + + template_response = platform_client.templates.get_template( + request=operations.GetTemplateRequest(template_id="hi_res_partition") + ) + assert template_response.status_code == 200 + + requests = httpx_mock.get_requests() + assert len(requests) == 1 + request = requests[0] + assert request.method == "GET" + assert request.url == url + + assert "template" in template_response.response_get_template + template = template_response.response_get_template + assert template["id"] == "hi_res_partition" + assert template["name"] == "High Resolution Partition" + assert len(template["nodes"]) == 1 + assert template["nodes"][0]["id"] == "93fc2ce8-e7c8-424f-a6aa-41460fc5d35d" + + +def test_get_template_not_found( + httpx_mock, platform_client: UnstructuredClient, platform_api_url: str +): + url = f"{platform_api_url}/api/v1/templates/nonexistent_template" + + httpx_mock.add_response( + method="GET", + status_code=404, + headers={"Content-Type": "application/json"}, + json={"detail": "Template nonexistent_template not found"}, + url=url, + ) + + with pytest.raises(SDKError) as e: + platform_client.templates.get_template( + request=operations.GetTemplateRequest(template_id="nonexistent_template") + ) + + assert e.value.status_code == 404 + assert "API error occurred" in e.value.message + + requests = httpx_mock.get_requests() + assert len(requests) == 1 + request = requests[0] + assert request.method == "GET" + assert request.url == url + diff --git a/_test_unstructured_client/integration/test_platform_workflow_lifecycle.py b/_test_unstructured_client/integration/test_platform_workflow_lifecycle.py new file mode 100644 index 00000000..c3e43d10 --- /dev/null +++ b/_test_unstructured_client/integration/test_platform_workflow_lifecycle.py @@ -0,0 +1,242 @@ +""" +Integration test for the complete platform API workflow lifecycle. 
+ +This test exercises the full workflow lifecycle: +- List workflows +- Create workflow +- Get workflow +- Delete workflow +- Create job (on-demand) +- Get job +- List jobs +- Get template +- List templates +""" + +from __future__ import annotations + +import json +import os +from pathlib import Path +from typing import Optional + +import pytest +from unstructured_client import UnstructuredClient +from unstructured_client.models import shared, operations +from unstructured_client.models.errors import SDKError + + +@pytest.fixture(scope="module") +def doc_path() -> Path: + """Get the path to sample documents directory.""" + return Path(__file__).resolve().parents[2] / "_sample_docs" + + +@pytest.fixture(scope="function") +def platform_client() -> UnstructuredClient: + """Create a platform API client for integration tests.""" + api_key = os.getenv("UNSTRUCTURED_API_KEY") + if not api_key: + pytest.skip("UNSTRUCTURED_API_KEY environment variable not set") + + platform_url = os.getenv("PLATFORM_API_URL", "https://platform.unstructuredapp.io") + + _client = UnstructuredClient( + api_key_auth=api_key, + server_url=platform_url, + ) + yield _client + + +@pytest.fixture(scope="function") +def created_workflow_id(platform_client: UnstructuredClient) -> Optional[str]: + """Fixture to create a workflow and clean it up after the test.""" + workflow_id = None + try: + # Create a workflow for testing + create_response = platform_client.workflows.create_workflow( + request=operations.CreateWorkflowRequest( + create_workflow=shared.CreateWorkflow( + name="test_integration_workflow", + workflow_type="basic", + ) + ) + ) + assert create_response.status_code == 200 + workflow_id = str(create_response.workflow_information.id) + yield workflow_id + finally: + # Cleanup: delete the workflow if it was created + # Note: The test itself may delete it, so we check if it still exists + if workflow_id: + try: + # Try to get the workflow first to see if it still exists + platform_client.workflows.get_workflow( + request=operations.GetWorkflowRequest(workflow_id=workflow_id) + ) + # If we get here, it exists, so delete it + platform_client.workflows.delete_workflow( + request=operations.DeleteWorkflowRequest(workflow_id=workflow_id) + ) + except SDKError: + # Workflow already deleted or doesn't exist, ignore + pass + except Exception: + pass # Ignore other cleanup errors + + +def test_workflow_lifecycle( + platform_client: UnstructuredClient, + created_workflow_id: Optional[str], + doc_path: Path, +): + """ + Test the complete workflow lifecycle including workflows, jobs, and templates. + """ + # 1. List workflows + list_response = platform_client.workflows.list_workflows( + request=operations.ListWorkflowsRequest() + ) + assert list_response.status_code == 200 + assert isinstance(list_response.response_list_workflows, list) + + # 2. Get workflow (using the created workflow) + if created_workflow_id: + get_response = platform_client.workflows.get_workflow( + request=operations.GetWorkflowRequest(workflow_id=created_workflow_id) + ) + assert get_response.status_code == 200 + assert str(get_response.workflow_information.id) == created_workflow_id + assert get_response.workflow_information.name == "test_integration_workflow" + + # 3. 
List templates + list_templates_response = platform_client.templates.list_templates( + request=operations.ListTemplatesRequest() + ) + assert list_templates_response.status_code == 200 + assert "templates" in list_templates_response.response_list_templates + templates = list_templates_response.response_list_templates["templates"] + assert isinstance(templates, list) + assert len(templates) > 0 + + # Verify we have expected templates + template_ids = [t.get("id") for t in templates] + assert "hi_res_partition" in template_ids or "hi_res_and_enrichment" in template_ids + + # 4. Get template + template_id = "hi_res_partition" + if template_id not in template_ids and len(templates) > 0: + template_id = templates[0].get("id") + + get_template_response = platform_client.templates.get_template( + request=operations.GetTemplateRequest(template_id=template_id) + ) + assert get_template_response.status_code == 200 + assert "template" in get_template_response.response_get_template + template = get_template_response.response_get_template["template"] + assert template.get("id") == template_id + + # 5. Create job (on-demand using template) + request_data = json.dumps({ + "template_id": template_id, + }) + + # Read a sample PDF file + pdf_filename = "layout-parser-paper-fast.pdf" + pdf_path = doc_path / pdf_filename + if not pdf_path.exists(): + # Fallback to another common test file + pdf_filename = "list-item-example-1.pdf" + pdf_path = doc_path / pdf_filename + + with open(pdf_path, "rb") as f: + pdf_content = f.read() + + create_job_response = platform_client.jobs.create_job( + request=operations.CreateJobRequest( + body_create_job=shared.BodyCreateJob( + request_data=request_data, + input_files=[ + shared.InputFiles( + content=pdf_content, + file_name=pdf_filename, + content_type="application/pdf", + ) + ], + ) + ) + ) + assert create_job_response.status_code == 200 + job_id = str(create_job_response.job_information.id) + assert job_id is not None + assert create_job_response.job_information.status in ["SCHEDULED", "IN_PROGRESS"] + + # 6. Get job + get_job_response = platform_client.jobs.get_job( + request=operations.GetJobRequest(job_id=job_id) + ) + assert get_job_response.status_code == 200 + assert str(get_job_response.job_information.id) == job_id + + # 7. List jobs + list_jobs_response = platform_client.jobs.list_jobs( + request=operations.ListJobsRequest() + ) + assert list_jobs_response.status_code == 200 + assert isinstance(list_jobs_response.response_list_jobs, list) + + # 8. Delete workflow (cleanup is handled by fixture, but we can verify it works) + if created_workflow_id: + delete_response = platform_client.workflows.delete_workflow( + request=operations.DeleteWorkflowRequest(workflow_id=created_workflow_id) + ) + assert delete_response.status_code in [200, 204] + + +def test_workflow_lifecycle_with_custom_dag_job(platform_client: UnstructuredClient): + """ + Test creating a job with a custom DAG (ephemeral job type). + """ + # 1. List templates to understand the structure + list_templates_response = platform_client.templates.list_templates( + request=operations.ListTemplatesRequest() + ) + assert list_templates_response.status_code == 200 + + # 2. 
Create a custom DAG job + # Using a simple partitioner node + custom_nodes = [ + { + "id": "93fc2ce8-e7c8-424f-a6aa-41460fc5d35d", + "name": "partition step", + "type": "partition", + "subtype": "unstructured_api", + "settings": { + "strategy": "fast", + "include_page_breaks": False, + }, + } + ] + + request_data = json.dumps({ + "job_nodes": custom_nodes, + }) + + create_job_response = platform_client.jobs.create_job( + request=operations.CreateJobRequest( + body_create_job=shared.BodyCreateJob( + request_data=request_data, + ) + ) + ) + assert create_job_response.status_code == 200 + job_id = str(create_job_response.job_information.id) + assert job_id is not None + + # 3. Verify the job can be retrieved + get_job_response = platform_client.jobs.get_job( + request=operations.GetJobRequest(job_id=job_id) + ) + assert get_job_response.status_code == 200 + assert str(get_job_response.job_information.id) == job_id + diff --git a/pyproject.toml b/pyproject.toml index 92e7a302..552e5c53 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ name = "unstructured-client" version = "0.42.4" description = "Python Client SDK for Unstructured API" authors = [{ name = "Unstructured" },] -readme = "README-PYPI.md" +readme = "README.md" requires-python = ">=3.9.2" dependencies = [ "aiofiles >=24.1.0", From c0bf55f8e7da967be6763d513a19745fd68ef07e Mon Sep 17 00:00:00 2001 From: Jordan Homan Date: Fri, 12 Dec 2025 17:35:56 -0500 Subject: [PATCH 3/4] fix tests --- _test_contract/platform_api/test_templates.py | 40 ++++++------------- 1 file changed, 13 insertions(+), 27 deletions(-) diff --git a/_test_contract/platform_api/test_templates.py b/_test_contract/platform_api/test_templates.py index 2875353e..8d1f8c8f 100644 --- a/_test_contract/platform_api/test_templates.py +++ b/_test_contract/platform_api/test_templates.py @@ -19,23 +19,14 @@ def test_list_templates(httpx_mock, platform_client: UnstructuredClient, platfor "name": "High Resolution Partition", "description": "Partition documents with high resolution strategy", "version": "1.0.0", - "nodes": [ - { - "id": "93fc2ce8-e7c8-424f-a6aa-41460fc5d35d", - "name": "partition step", - "type": "partition", - "subtype": "unstructured_api", - } - ], - "edges": [], + "last_updated": "2024-01-01T00:00:00.000000", }, { "id": "hi_res_and_enrichment", "name": "High Resolution and Enrichment", "description": "Partition with enrichment", "version": "1.0.0", - "nodes": [], - "edges": [], + "last_updated": "2024-01-01T00:00:00.000000", }, ], url=url, @@ -52,12 +43,12 @@ def test_list_templates(httpx_mock, platform_client: UnstructuredClient, platfor assert request.method == "GET" assert request.url == url - assert "templates" in templates_response.response_list_templates templates = templates_response.response_list_templates + assert templates is not None assert len(templates) == 2 - assert templates[0]["id"] == "hi_res_partition" - assert templates[0]["name"] == "High Resolution Partition" - assert templates[1]["id"] == "hi_res_and_enrichment" + assert templates[0].id == "hi_res_partition" + assert templates[0].name == "High Resolution Partition" + assert templates[1].id == "hi_res_and_enrichment" def test_get_template(httpx_mock, platform_client: UnstructuredClient, platform_api_url: str): @@ -71,6 +62,7 @@ def test_get_template(httpx_mock, platform_client: UnstructuredClient, platform_ "name": "High Resolution Partition", "description": "Partition documents with high resolution strategy", "version": "1.0.0", + "last_updated": 
"2024-01-01T00:00:00.000000", "nodes": [ { "id": "93fc2ce8-e7c8-424f-a6aa-41460fc5d35d", @@ -83,12 +75,6 @@ def test_get_template(httpx_mock, platform_client: UnstructuredClient, platform_ }, } ], - "edges": [ - { - "source_id": "00000000-0000-0000-0000-000000000001-downloader", - "destination_id": "93fc2ce8-e7c8-424f-a6aa-41460fc5d35d", - } - ], }, url=url, ) @@ -104,12 +90,12 @@ def test_get_template(httpx_mock, platform_client: UnstructuredClient, platform_ assert request.method == "GET" assert request.url == url - assert "template" in template_response.response_get_template - template = template_response.response_get_template - assert template["id"] == "hi_res_partition" - assert template["name"] == "High Resolution Partition" - assert len(template["nodes"]) == 1 - assert template["nodes"][0]["id"] == "93fc2ce8-e7c8-424f-a6aa-41460fc5d35d" + template = template_response.template_detail + assert template is not None + assert template.id == "hi_res_partition" + assert template.name == "High Resolution Partition" + assert len(template.nodes) == 1 + assert template.nodes[0].id == "93fc2ce8-e7c8-424f-a6aa-41460fc5d35d" def test_get_template_not_found( From f94a339c42d0c4b71a90af2031b1e023238b18bb Mon Sep 17 00:00:00 2001 From: Jordan Homan Date: Fri, 12 Dec 2025 18:15:10 -0500 Subject: [PATCH 4/4] update --- .../test_platform_workflow_lifecycle.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/_test_unstructured_client/integration/test_platform_workflow_lifecycle.py b/_test_unstructured_client/integration/test_platform_workflow_lifecycle.py index c3e43d10..6515398e 100644 --- a/_test_unstructured_client/integration/test_platform_workflow_lifecycle.py +++ b/_test_unstructured_client/integration/test_platform_workflow_lifecycle.py @@ -114,27 +114,27 @@ def test_workflow_lifecycle( request=operations.ListTemplatesRequest() ) assert list_templates_response.status_code == 200 - assert "templates" in list_templates_response.response_list_templates - templates = list_templates_response.response_list_templates["templates"] + assert list_templates_response.response_list_templates is not None + templates = list_templates_response.response_list_templates assert isinstance(templates, list) assert len(templates) > 0 # Verify we have expected templates - template_ids = [t.get("id") for t in templates] + template_ids = [t.id for t in templates] assert "hi_res_partition" in template_ids or "hi_res_and_enrichment" in template_ids # 4. Get template template_id = "hi_res_partition" if template_id not in template_ids and len(templates) > 0: - template_id = templates[0].get("id") + template_id = templates[0].id get_template_response = platform_client.templates.get_template( request=operations.GetTemplateRequest(template_id=template_id) ) assert get_template_response.status_code == 200 - assert "template" in get_template_response.response_get_template - template = get_template_response.response_get_template["template"] - assert template.get("id") == template_id + assert get_template_response.template_detail is not None + template = get_template_response.template_detail + assert template.id == template_id # 5. Create job (on-demand using template) request_data = json.dumps({