Skip to content

Commit c6be14f

Browse files
authored
Revert "[ML][Pipelines] feat: load component from flow (Azure#31632)" (Azure#31911)
This reverts commit 53ab388.
1 parent d7139c1 commit c6be14f

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

51 files changed

+159
-2626
lines changed

.vscode/cspell.json

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,6 @@
8888
"sdk/ml/azure-ai-ml/tests/**",
8989
"sdk/ml/azure-ai-ml/swagger/**",
9090
"sdk/ml/azure-ai-ml/NOTICE.txt",
91-
"sdk/ml/azure-ai-ml/.pre-commit-config.yaml",
9291
"sdk/loadtestservice/azure-developer-loadtesting/**",
9392
"sdk/translation/azure-ai-translation-text/azure/ai/translation/text/_serialization.py",
9493
"sdk/translation/azure-ai-translation-text/tests/test_break_sentence.py",

sdk/ml/azure-ai-ml/.pre-commit-config.yaml

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,8 @@ repos:
2626
args: ['--settings-file', 'sdk/ml/azure-ai-ml/pyproject.toml']
2727
- repo: local
2828
hooks:
29-
- id: pylint-dependencies-check
30-
name: pylint-dependencies-check
29+
- id: pylint
30+
name: pylint
3131
entry: python
3232
language: system
3333
types: [python]
@@ -69,9 +69,9 @@ repos:
6969
print(f"Please run the following command to install the correct version of {packagename}")
7070
print(f"\tpython -m pip install {packagename}=={required_version} {' '.join(install_args)}")
7171
sys.exit(1)
72-
- id: pylint
73-
name: pylint
74-
entry: pylint
75-
language: system
76-
args: [--rcfile=pylintrc, --output-format=parseable]
77-
files: '^sdk/ml/azure-ai-ml/azure/ai/ml/.*\.py$'
72+
73+
# Run pylint
74+
os.execl(sys.executable, sys.executable, "-m", "pylint", *sys.argv[1:])
75+
# cspell:disable-next-line
76+
- "--rcfile=pylintrc"
77+
- "--output-format=parseable"

sdk/ml/azure-ai-ml/assets.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,5 +2,5 @@
22
"AssetsRepo": "Azure/azure-sdk-assets",
33
"AssetsRepoPrefixPath": "python",
44
"TagPrefix": "python/ml/azure-ai-ml",
5-
"Tag": "python/ml/azure-ai-ml_b8d9d78843"
5+
"Tag": "python/ml/azure-ai-ml_d05969c2b4"
66
}

sdk/ml/azure-ai-ml/azure/ai/ml/_schema/component/__init__.py

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -5,21 +5,21 @@
55
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
66

77
from .command_component import AnonymousCommandComponentSchema, CommandComponentSchema, ComponentFileRefField
8-
from .component import ComponentSchema, ComponentYamlRefField
8+
from .component import ComponentSchema
9+
from .import_component import AnonymousImportComponentSchema, ImportComponentFileRefField, ImportComponentSchema
10+
from .parallel_component import AnonymousParallelComponentSchema, ParallelComponentFileRefField, ParallelComponentSchema
11+
from .spark_component import AnonymousSparkComponentSchema, SparkComponentFileRefField, SparkComponentSchema
912
from .data_transfer_component import (
1013
AnonymousDataTransferCopyComponentSchema,
11-
AnonymousDataTransferExportComponentSchema,
12-
AnonymousDataTransferImportComponentSchema,
1314
DataTransferCopyComponentFileRefField,
1415
DataTransferCopyComponentSchema,
15-
DataTransferExportComponentFileRefField,
16-
DataTransferExportComponentSchema,
16+
AnonymousDataTransferImportComponentSchema,
1717
DataTransferImportComponentFileRefField,
1818
DataTransferImportComponentSchema,
19+
AnonymousDataTransferExportComponentSchema,
20+
DataTransferExportComponentFileRefField,
21+
DataTransferExportComponentSchema,
1922
)
20-
from .import_component import AnonymousImportComponentSchema, ImportComponentFileRefField, ImportComponentSchema
21-
from .parallel_component import AnonymousParallelComponentSchema, ParallelComponentFileRefField, ParallelComponentSchema
22-
from .spark_component import AnonymousSparkComponentSchema, SparkComponentFileRefField, SparkComponentSchema
2323

2424
__all__ = [
2525
"ComponentSchema",
@@ -44,5 +44,4 @@
4444
"AnonymousDataTransferExportComponentSchema",
4545
"DataTransferExportComponentFileRefField",
4646
"DataTransferExportComponentSchema",
47-
"ComponentYamlRefField",
4847
]

sdk/ml/azure-ai-ml/azure/ai/ml/_schema/component/component.py

Lines changed: 3 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,8 @@
11
# ---------------------------------------------------------
22
# Copyright (c) Microsoft Corporation. All rights reserved.
33
# ---------------------------------------------------------
4-
from pathlib import Path
54

6-
from marshmallow import ValidationError, fields, post_dump, pre_dump, pre_load
7-
from marshmallow.fields import Field
5+
from marshmallow import fields, post_dump, pre_dump, pre_load
86

97
from azure.ai.ml._schema.component.input_output import InputPortSchema, OutputPortSchema, ParameterSchema
108
from azure.ai.ml._schema.core.fields import (
@@ -15,8 +13,8 @@
1513
UnionField,
1614
)
1715
from azure.ai.ml._schema.core.intellectual_property import IntellectualPropertySchema
18-
from azure.ai.ml._utils.utils import is_private_preview_enabled, load_yaml
19-
from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, AzureMLResourceType
16+
from azure.ai.ml._utils.utils import is_private_preview_enabled
17+
from azure.ai.ml.constants._common import AzureMLResourceType
2018

2119
from .._utils.utils import _resolve_group_inputs_for_component
2220
from ..assets.asset import AssetSchema
@@ -28,52 +26,6 @@ def _get_field_name(self):
2826
return "Component"
2927

3028

31-
class ComponentYamlRefField(Field):
32-
"""Allows you to nest a :class:`Schema <marshmallow.Schema>`
33-
inside a yaml ref field.
34-
"""
35-
36-
def _jsonschema_type_mapping(self):
37-
schema = {"type": "string"}
38-
if self.name is not None:
39-
schema["title"] = self.name
40-
if self.dump_only:
41-
schema["readonly"] = True
42-
return schema
43-
44-
def _deserialize(self, value, attr, data, **kwargs):
45-
if not isinstance(value, str):
46-
raise ValidationError(f"Nested yaml ref field expected a string but got {type(value)}.")
47-
48-
base_path = Path(self.context[BASE_PATH_CONTEXT_KEY])
49-
50-
source_path = Path(value)
51-
# raise if the string is not a valid path, like "azureml:xxx"
52-
try:
53-
source_path.resolve()
54-
except OSError as ex:
55-
raise ValidationError(f"Nested file ref field expected a local path but got {value}.") from ex
56-
57-
if not source_path.is_absolute():
58-
source_path = base_path / source_path
59-
60-
if not source_path.is_file():
61-
raise ValidationError(
62-
f"Nested yaml ref field expected a local path but can't find {value} based on {base_path.as_posix()}."
63-
)
64-
65-
loaded_value = load_yaml(source_path)
66-
67-
# local import to avoid circular import
68-
from azure.ai.ml.entities import Component
69-
70-
component = Component._load(data=loaded_value, yaml_path=source_path) # pylint: disable=protected-access
71-
return component
72-
73-
def _serialize(self, value, attr, obj, **kwargs):
74-
raise ValidationError("Serialize on RefField is not supported.")
75-
76-
7729
class ComponentSchema(AssetSchema):
7830
schema = fields.Str(data_key="$schema", attribute="_schema")
7931
name = ComponentNameStr(required=True)

sdk/ml/azure-ai-ml/azure/ai/ml/_schema/component/flow.py

Lines changed: 0 additions & 89 deletions
This file was deleted.

sdk/ml/azure-ai-ml/azure/ai/ml/_schema/pipeline/component_job.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,6 @@
1515
AnonymousParallelComponentSchema,
1616
AnonymousSparkComponentSchema,
1717
ComponentFileRefField,
18-
ComponentYamlRefField,
1918
DataTransferCopyComponentFileRefField,
2019
ImportComponentFileRefField,
2120
ParallelComponentFileRefField,
@@ -29,7 +28,6 @@
2928
from ...exceptions import ValidationException
3029
from .._sweep.parameterized_sweep import ParameterizedSweepSchema
3130
from .._utils.data_binding_expression import support_data_binding_expression_for_fields
32-
from ..component.flow import FlowComponentSchema
3331
from ..core.fields import (
3432
ArmVersionedStr,
3533
ComputeField,
@@ -254,10 +252,6 @@ class ParallelSchema(BaseNodeSchema, ParameterizedParallelSchema):
254252
# component file reference
255253
ParallelComponentFileRefField(),
256254
],
257-
NodeType.FLOW_PARALLEL: [
258-
NestedField(FlowComponentSchema, unknown=INCLUDE, dump_only=True),
259-
ComponentYamlRefField(),
260-
],
261255
},
262256
plain_union_fields=[
263257
# for registry type assets

sdk/ml/azure-ai-ml/azure/ai/ml/constants/_common.py

Lines changed: 0 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -381,18 +381,6 @@ class CommonYamlFields:
381381
"""Schema."""
382382

383383

384-
class SchemaUrl:
385-
"""Schema urls.
386-
387-
Schema urls will be used in VSCode extension to validate yaml files. It will also be used to identify the
388-
corresponding entity type of a yaml file, especially for some internal yaml files.
389-
"""
390-
391-
PROMPTFLOW_PREFIX = "https://azuremlschemas.azureedge.net/promptflow/"
392-
PROMPTFLOW_FLOW = PROMPTFLOW_PREFIX + "latest/Flow.schema.json"
393-
PROMPTFLOW_RUN = PROMPTFLOW_PREFIX + "latest/Run.schema.json"
394-
395-
396384
class GitProperties:
397385
"""GitProperties is a class that defines the constants used by the SDK/CLI for Git operations.
398386

sdk/ml/azure-ai-ml/azure/ai/ml/constants/_component.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@ class NodeType(object):
2323
IMPORT = "import"
2424
SPARK = "spark"
2525
DATA_TRANSFER = "data_transfer"
26-
FLOW_PARALLEL = "promptflow_parallel"
2726
# Note: container is not a real component type,
2827
# only used to mark component from container data.
2928
_CONTAINER = "_container"

sdk/ml/azure-ai-ml/azure/ai/ml/entities/_builders/parallel.py

Lines changed: 10 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@
1717
from ...constants._common import ARM_ID_PREFIX
1818
from ...constants._component import NodeType
1919
from .._component.component import Component
20-
from .._component.flow import FlowComponent
2120
from .._component.parallel_component import ParallelComponent
2221
from .._inputs_outputs import Input, Output
2322
from .._job.job_resource_configuration import JobResourceConfiguration
@@ -123,28 +122,15 @@ def __init__(
123122
validate_attribute_type(attrs_to_check=locals(), attr_type_map=self._attr_type_map())
124123
kwargs.pop("type", None)
125124

126-
if isinstance(component, FlowComponent):
127-
# make input definition fit actual inputs for flow component
128-
with component._inputs._fit_inputs(inputs): # pylint: disable=protected-access
129-
BaseNode.__init__(
130-
self,
131-
type=NodeType.PARALLEL,
132-
component=component,
133-
inputs=inputs,
134-
outputs=outputs,
135-
compute=compute,
136-
**kwargs,
137-
)
138-
else:
139-
BaseNode.__init__(
140-
self,
141-
type=NodeType.PARALLEL,
142-
component=component,
143-
inputs=inputs,
144-
outputs=outputs,
145-
compute=compute,
146-
**kwargs,
147-
)
125+
BaseNode.__init__(
126+
self,
127+
type=NodeType.PARALLEL,
128+
component=component,
129+
inputs=inputs,
130+
outputs=outputs,
131+
compute=compute,
132+
**kwargs,
133+
)
148134
# init mark for _AttrDict
149135
self._init = True
150136

@@ -323,7 +309,7 @@ def set_resources(
323309
@classmethod
324310
def _attr_type_map(cls) -> dict:
325311
return {
326-
"component": (str, ParallelComponent, FlowComponent),
312+
"component": (str, ParallelComponent),
327313
"retry_settings": (dict, RetrySettings),
328314
"resources": (dict, JobResourceConfiguration),
329315
"task": (dict, ParallelTask),

0 commit comments

Comments
 (0)