Skip to content

Commit aab265f

Browse files
authored
fix: return git path as Code object (Azure#27195)
1 parent df1c244 commit aab265f

File tree

7 files changed

+78
-95
lines changed

7 files changed

+78
-95
lines changed

sdk/ml/azure-ai-ml/azure/ai/ml/entities/_component/component.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -511,7 +511,7 @@ def _resolve_local_code(self):
511511
code = getattr(self, "code")
512512
# special check for git path code value
513513
if code is not None and isinstance(code, str) and code.startswith("git+"):
514-
yield code
514+
yield Code(base_path=self._base_path, path=code)
515515
elif code is not None and os.path.isfile(code):
516516
yield Code(base_path=self._base_path, path=code)
517517
else:

sdk/ml/azure-ai-ml/tests/component/unittests/test_command_component_entity.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -202,7 +202,7 @@ def test_command_component_code_git_path(self):
202202
yaml_dict = load_yaml(yaml_path)
203203
component = load_component(yaml_path)
204204
with component._resolve_local_code() as code:
205-
assert code == yaml_dict["code"]
205+
assert code.path == yaml_dict["code"]
206206

207207
@pytest.mark.skipif(
208208
sys.version_info[1] == 11,

sdk/ml/azure-ai-ml/tests/dsl/e2etests/test_dsl_pipeline.py

Lines changed: 10 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -57,12 +57,13 @@
5757
]
5858

5959

60-
def assert_job_cancel(pipeline, client: MLClient):
61-
job = client.jobs.create_or_update(pipeline)
60+
def assert_job_cancel(pipeline, client: MLClient, experiment_name=None):
61+
job = client.jobs.create_or_update(pipeline, experiment_name=experiment_name)
6262
try:
6363
cancel_poller = client.jobs.begin_cancel(job.name)
6464
assert isinstance(cancel_poller, LROPoller)
65-
assert cancel_poller.result() is None
65+
# skip wait for cancel result to reduce test run duration.
66+
# assert cancel_poller.result() is None
6667
except HttpResponseError:
6768
pass
6869
return job
@@ -1595,10 +1596,8 @@ def parallel_in_pipeline(job_data_path, score_model):
15951596
),
15961597
)
15971598
# submit pipeline job
1598-
pipeline_job = client.jobs.create_or_update(pipeline, experiment_name="parallel_in_pipeline")
1599-
cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
1600-
assert isinstance(cancel_poller, LROPoller)
1601-
assert cancel_poller.result() is None
1599+
pipeline_job = assert_job_cancel(pipeline, client, experiment_name="parallel_in_pipeline")
1600+
16021601
# check required fields in job dict
16031602
job_dict = pipeline_job._to_dict()
16041603
expected_keys = ["status", "properties", "tags", "creation_context"]
@@ -1628,10 +1627,7 @@ def parallel_in_pipeline(job_data_path):
16281627
),
16291628
)
16301629
# submit pipeline job
1631-
pipeline_job = client.jobs.create_or_update(pipeline, experiment_name="parallel_in_pipeline")
1632-
cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
1633-
assert isinstance(cancel_poller, LROPoller)
1634-
assert cancel_poller.result() is None
1630+
pipeline_job = assert_job_cancel(pipeline, client, experiment_name="parallel_in_pipeline")
16351631
# check required fields in job dict
16361632
job_dict = pipeline_job._to_dict()
16371633
expected_keys = ["status", "properties", "tags", "creation_context"]
@@ -1828,13 +1824,7 @@ def parallel_in_pipeline(job_data_path):
18281824
),
18291825
)
18301826
# submit job to workspace
1831-
pipeline_job = client.jobs.create_or_update(
1832-
pipeline,
1833-
experiment_name="parallel_in_pipeline",
1834-
)
1835-
cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
1836-
assert isinstance(cancel_poller, LROPoller)
1837-
assert cancel_poller.result() is None
1827+
pipeline_job = assert_job_cancel(pipeline, client, experiment_name="parallel_in_pipeline")
18381828
omit_fields = [
18391829
"jobs.parallel_node.task.code",
18401830
"jobs.parallel_node.task.environment",
@@ -1919,10 +1909,7 @@ def parallel_in_pipeline(job_data_path):
19191909
pipeline.outputs.job_out_data.mode = "upload"
19201910

19211911
# submit pipeline job
1922-
pipeline_job = client.jobs.create_or_update(pipeline, experiment_name="parallel_in_pipeline")
1923-
cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
1924-
assert isinstance(cancel_poller, LROPoller)
1925-
assert cancel_poller.result() is None
1912+
pipeline_job = assert_job_cancel(pipeline, client, experiment_name="parallel_in_pipeline")
19261913

19271914
omit_fields = [
19281915
"jobs.*.task.code",
@@ -2361,10 +2348,7 @@ def spark_pipeline_from_yaml(iris_data):
23612348
pipeline.outputs.output.type = "uri_file"
23622349

23632350
# submit pipeline job
2364-
pipeline_job = client.jobs.create_or_update(pipeline, experiment_name="spark_in_pipeline")
2365-
cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
2366-
assert isinstance(cancel_poller, LROPoller)
2367-
assert cancel_poller.result() is None
2351+
pipeline_job = assert_job_cancel(pipeline, client, experiment_name="spark_in_pipeline")
23682352
# check required fields in job dict
23692353
job_dict = pipeline_job._to_dict()
23702354
expected_keys = ["status", "properties", "tags", "creation_context"]

sdk/ml/azure-ai-ml/tests/dsl/e2etests/test_dsl_pipeline_on_registry.py

Lines changed: 15 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,18 @@
1313
from devtools_testutils import AzureRecordedTestCase
1414

1515

16+
def assert_job_cancel(pipeline, client: MLClient, experiment_name=None):
17+
job = client.jobs.create_or_update(pipeline, experiment_name=experiment_name)
18+
try:
19+
cancel_poller = client.jobs.begin_cancel(job.name)
20+
assert isinstance(cancel_poller, LROPoller)
21+
# skip wait for cancel result to reduce test run duration.
22+
# assert cancel_poller.result() is None
23+
except HttpResponseError:
24+
pass
25+
return job
26+
27+
1628
@pytest.mark.usefixtures("enable_pipeline_private_preview_features", "recorded_test")
1729
@pytest.mark.timeout(timeout=_DSL_TIMEOUT_SECOND, method=_PYTEST_TIMEOUT_METHOD)
1830
@pytest.mark.e2etest
@@ -65,9 +77,7 @@ def score_pipeline_with_registry_model(model_input, test_data):
6577
test_data=test_data
6678
)
6779
pipeline_job.settings.default_compute = "cpu-cluster"
68-
pipeline_job = client.jobs.create_or_update(pipeline_job)
69-
cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
70-
assert isinstance(cancel_poller, LROPoller)
80+
assert_job_cancel(pipeline_job, client)
7181

7282
@pytest.mark.skip(reason="request body still exists when re-recording and will raise error "
7383
"'Unable to find a record for the request' in playback mode")
@@ -101,9 +111,7 @@ def score_pipeline_with_registry_model(model_input, test_data):
101111
model_input=pipeline_score_model, test_data=test_data
102112
)
103113
pipeline_job.settings.default_compute = "cpu-cluster"
104-
pipeline_job = client.jobs.create_or_update(pipeline_job)
105-
cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
106-
assert isinstance(cancel_poller, LROPoller)
114+
assert_job_cancel(pipeline_job, client)
107115

108116
@pytest.mark.skip(reason="request body still exists when re-recording and will raise error "
109117
"'Unable to find a record for the request' in playback mode")
@@ -136,6 +144,4 @@ def score_pipeline_with_registry_model(model_input, test_data):
136144
test_data=test_data
137145
)
138146
pipeline_job.settings.default_compute = "cpu-cluster"
139-
pipeline_job = client.jobs.create_or_update(pipeline_job)
140-
cancel_poller = client.jobs.begin_cancel(pipeline_job.name)
141-
assert isinstance(cancel_poller, LROPoller)
147+
assert_job_cancel(pipeline_job, client)

0 commit comments

Comments
 (0)