@@ -57,12 +57,13 @@
 ]
 
 
-def assert_job_cancel(pipeline, client: MLClient):
-    job = client.jobs.create_or_update(pipeline)
+def assert_job_cancel(pipeline, client: MLClient, experiment_name=None):
+    job = client.jobs.create_or_update(pipeline, experiment_name=experiment_name)
     try:
         cancel_poller = client.jobs.begin_cancel(job.name)
         assert isinstance(cancel_poller, LROPoller)
-        assert cancel_poller.result() is None
+        # skip wait for cancel result to reduce test run duration.
+        # assert cancel_poller.result() is None
     except HttpResponseError:
         pass
     return job
@@ -1595,10 +1596,8 @@ def parallel_in_pipeline(job_data_path, score_model):
             ),
         )
         # submit pipeline job
-        pipeline_job = client.jobs.create_or_update(pipeline, experiment_name="parallel_in_pipeline")
-        cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
-        assert isinstance(cancel_poller, LROPoller)
-        assert cancel_poller.result() is None
+        pipeline_job = assert_job_cancel(pipeline, client, experiment_name="parallel_in_pipeline")
+
         # check required fields in job dict
         job_dict = pipeline_job._to_dict()
         expected_keys = ["status", "properties", "tags", "creation_context"]
@@ -1628,10 +1627,7 @@ def parallel_in_pipeline(job_data_path):
             ),
         )
         # submit pipeline job
-        pipeline_job = client.jobs.create_or_update(pipeline, experiment_name="parallel_in_pipeline")
-        cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
-        assert isinstance(cancel_poller, LROPoller)
-        assert cancel_poller.result() is None
+        pipeline_job = assert_job_cancel(pipeline, client, experiment_name="parallel_in_pipeline")
         # check required fields in job dict
         job_dict = pipeline_job._to_dict()
         expected_keys = ["status", "properties", "tags", "creation_context"]
@@ -1828,13 +1824,7 @@ def parallel_in_pipeline(job_data_path):
             ),
         )
         # submit job to workspace
-        pipeline_job = client.jobs.create_or_update(
-            pipeline,
-            experiment_name="parallel_in_pipeline",
-        )
-        cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
-        assert isinstance(cancel_poller, LROPoller)
-        assert cancel_poller.result() is None
+        pipeline_job = assert_job_cancel(pipeline, client, experiment_name="parallel_in_pipeline")
         omit_fields = [
             "jobs.parallel_node.task.code",
             "jobs.parallel_node.task.environment",
@@ -1919,10 +1909,7 @@ def parallel_in_pipeline(job_data_path):
         pipeline.outputs.job_out_data.mode = "upload"
 
         # submit pipeline job
-        pipeline_job = client.jobs.create_or_update(pipeline, experiment_name="parallel_in_pipeline")
-        cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
-        assert isinstance(cancel_poller, LROPoller)
-        assert cancel_poller.result() is None
+        pipeline_job = assert_job_cancel(pipeline, client, experiment_name="parallel_in_pipeline")
 
         omit_fields = [
             "jobs.*.task.code",
@@ -2361,10 +2348,7 @@ def spark_pipeline_from_yaml(iris_data):
         pipeline.outputs.output.type = "uri_file"
 
         # submit pipeline job
-        pipeline_job = client.jobs.create_or_update(pipeline, experiment_name="spark_in_pipeline")
-        cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
-        assert isinstance(cancel_poller, LROPoller)
-        assert cancel_poller.result() is None
+        pipeline_job = assert_job_cancel(pipeline, client, experiment_name="spark_in_pipeline")
         # check required fields in job dict
         job_dict = pipeline_job._to_dict()
         expected_keys = ["status", "properties", "tags", "creation_context"]
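
For reference, the helper introduced in the first hunk replaces four copies of the same submit-then-cancel boilerplate. A minimal self-contained sketch of the pattern follows; the pipeline object and client fixture are assumed to exist, and the trailing call site is hypothetical:

    from azure.ai.ml import MLClient
    from azure.core.exceptions import HttpResponseError
    from azure.core.polling import LROPoller


    def assert_job_cancel(pipeline, client: MLClient, experiment_name=None):
        # Submit the pipeline job, then immediately request cancellation so a
        # test can validate submission without paying for a full run.
        job = client.jobs.create_or_update(pipeline, experiment_name=experiment_name)
        try:
            cancel_poller = client.jobs.begin_cancel(job.name)
            assert isinstance(cancel_poller, LROPoller)
            # Deliberately not calling cancel_poller.result(): waiting for the
            # cancellation to finish would slow every test using this helper.
        except HttpResponseError:
            # Cancellation can race with the job reaching a terminal state;
            # tolerate the service error in that case.
            pass
        return job

    # Hypothetical call site inside a test:
    # pipeline_job = assert_job_cancel(pipeline, client, experiment_name="parallel_in_pipeline")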