545 changes: 327 additions & 218 deletions eng/emitter-package-lock.json

Large diffs are not rendered by default.

27 changes: 14 additions & 13 deletions eng/emitter-package.json
@@ -1,24 +1,25 @@
{
"name": "dist/src/index.js",
"main": "dist/src/index.js",
"dependencies": {
"@azure-tools/typespec-python": "0.60.1"
"@azure-tools/typespec-python": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@azure-tools/typespec-python/-/typespec-python-0.60.1-alpha.20260220.1.tgz"
},
"devDependencies": {
"@azure-tools/typespec-autorest": "~0.65.0",
"@azure-tools/typespec-azure-core": "~0.65.0",
"@azure-tools/typespec-azure-resource-manager": "~0.65.0",
"@azure-tools/typespec-azure-rulesets": "~0.65.0",
"@azure-tools/typespec-client-generator-core": "~0.65.1",
"@typespec/compiler": "^1.9.0",
"@typespec/events": "~0.79.0",
"@typespec/http": "^1.9.0",
"@typespec/rest": "~0.79.0",
"@typespec/versioning": "~0.79.0",
"@typespec/openapi": "^1.9.0",
"@typespec/events": "~0.79.0",
"@typespec/rest": "~0.79.0",
"@typespec/sse": "~0.79.0",
"@typespec/streams": "~0.79.0",
"@typespec/xml": "~0.79.0",
"@azure-tools/openai-typespec": "1.8.0",
"@azure-tools/typespec-autorest": "~0.65.0",
"@azure-tools/typespec-azure-core": "~0.65.0",
"@azure-tools/typespec-azure-resource-manager": "~0.65.0",
"@azure-tools/typespec-azure-rulesets": "~0.65.0",
"@azure-tools/typespec-client-generator-core": "~0.65.0",
"@azure-tools/typespec-liftr-base": "0.11.0"
"@typespec/versioning": "~0.79.0",
"@typespec/xml": "~0.79.0"
},
"overrides": {
"@autorest/python": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@autorest/python/-/python-6.49.1-alpha.20260220.1.tgz"
}
}
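The dependency change above swaps the released @azure-tools/typespec-python pin for a dev-feed tarball, and the overrides block does the same for @autorest/python. As a quick, hypothetical sanity check (not part of the repo tooling, assuming only the JSON layout shown above), the pinned sources can be listed with a few lines of Python:

import json
from pathlib import Path

def emitter_pins(path: str = "eng/emitter-package.json") -> dict:
    """Return package name -> pinned source (semver range or tarball URL)."""
    data = json.loads(Path(path).read_text())
    pins = {}
    for section in ("dependencies", "devDependencies", "overrides"):
        pins.update(data.get(section, {}))
    return pins

for name, source in sorted(emitter_pins().items()):
    kind = "tarball" if source.startswith("http") else "semver"
    print(f"{name}: {kind} -> {source}")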
5 changes: 4 additions & 1 deletion sdk/ai/azure-ai-agents/_metadata.json
@@ -1,3 +1,6 @@
{
"apiVersion": "2025-05-15-preview"
"apiVersion": "2025-05-15-preview",
"apiVersions": {
"Azure.AI.Agents": "2025-05-15-preview"
}
}
6 changes: 3 additions & 3 deletions sdk/ai/azure-ai-agents/azure/ai/agents/_client.py
@@ -56,9 +56,9 @@ class AgentsClient(_AgentsClientOperationsMixin): # pylint: disable=too-many-in
:type endpoint: str
:param credential: Credential used to authenticate requests to the service. Required.
:type credential: ~azure.core.credentials.TokenCredential
:keyword api_version: The API version to use for this operation. Default value is
"2025-05-15-preview". Note that overriding this default value may result in unsupported
behavior.
:keyword api_version: The API version to use for this operation. Known values are
"2025-05-15-preview" and None. Default value is "2025-05-15-preview". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""

6 changes: 3 additions & 3 deletions sdk/ai/azure-ai-agents/azure/ai/agents/_configuration.py
@@ -27,9 +27,9 @@ class AgentsClientConfiguration: # pylint: disable=too-many-instance-attributes
:type endpoint: str
:param credential: Credential used to authenticate requests to the service. Required.
:type credential: ~azure.core.credentials.TokenCredential
:keyword api_version: The API version to use for this operation. Default value is
"2025-05-15-preview". Note that overriding this default value may result in unsupported
behavior.
:keyword api_version: The API version to use for this operation. Known values are
"2025-05-15-preview" and None. Default value is "2025-05-15-preview". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""

144 changes: 132 additions & 12 deletions sdk/ai/azure-ai-agents/azure/ai/agents/_utils/model_base.py
@@ -1,3 +1,4 @@
# pylint: disable=line-too-long,useless-suppression,too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
@@ -36,6 +37,7 @@

TZ_UTC = timezone.utc
_T = typing.TypeVar("_T")
_NONE_TYPE = type(None)


def _timedelta_as_isostr(td: timedelta) -> str:
@@ -170,6 +172,21 @@ def default(self, o): # pylint: disable=too-many-return-statements
r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT"
)

_ARRAY_ENCODE_MAPPING = {
"pipeDelimited": "|",
"spaceDelimited": " ",
"commaDelimited": ",",
"newlineDelimited": "\n",
}


def _deserialize_array_encoded(delimit: str, attr):
if isinstance(attr, str):
if attr == "":
return []
return attr.split(delimit)
return attr
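The mapping and helper above turn delimiter-encoded strings back into lists; the matching change to _serialize later in this file joins lists of strings with the same delimiter. A standalone sketch of that round trip (mirroring the helpers rather than importing the private module):

ARRAY_ENCODE_MAPPING = {
    "pipeDelimited": "|",
    "spaceDelimited": " ",
    "commaDelimited": ",",
    "newlineDelimited": "\n",
}

def encode_array(values: list, fmt: str) -> str:
    # Mirrors the join added to _serialize for lists of strings.
    return ARRAY_ENCODE_MAPPING[fmt].join(values)

def decode_array(encoded: str, fmt: str) -> list:
    # Mirrors _deserialize_array_encoded: an empty string becomes an empty list.
    if encoded == "":
        return []
    return encoded.split(ARRAY_ENCODE_MAPPING[fmt])

wire = encode_array(["red", "green", "blue"], "pipeDelimited")
assert wire == "red|green|blue"
assert decode_array(wire, "pipeDelimited") == ["red", "green", "blue"]
assert decode_array("", "pipeDelimited") == []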


def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
"""Deserialize ISO-8601 formatted string into Datetime object.
@@ -201,7 +218,7 @@ def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
test_utc = date_obj.utctimetuple()
if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
raise OverflowError("Hit max or min date")
return date_obj
return date_obj # type: ignore[no-any-return]


def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime:
@@ -255,7 +272,7 @@ def _deserialize_time(attr: typing.Union[str, time]) -> time:
"""
if isinstance(attr, time):
return attr
return isodate.parse_time(attr)
return isodate.parse_time(attr) # type: ignore[no-any-return]


def _deserialize_bytes(attr):
@@ -314,6 +331,8 @@ def _deserialize_int_as_str(attr):
def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
if annotation is int and rf and rf._format == "str":
return _deserialize_int_as_str
if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING:
return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format])
if rf and rf._format:
return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore
@@ -352,9 +371,39 @@ def __contains__(self, key: typing.Any) -> bool:
return key in self._data

def __getitem__(self, key: str) -> typing.Any:
# If this key has been deserialized (for mutable types), we need to handle serialization
if hasattr(self, "_attr_to_rest_field"):
cache_attr = f"_deserialized_{key}"
if hasattr(self, cache_attr):
rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key)
if rf:
value = self._data.get(key)
if isinstance(value, (dict, list, set)):
# For mutable types, serialize and return
# But also update _data with serialized form and clear flag
# so mutations via this returned value affect _data
serialized = _serialize(value, rf._format)
# If serialized form is same type (no transformation needed),
# return _data directly so mutations work
if isinstance(serialized, type(value)) and serialized == value:
return self._data.get(key)
# Otherwise return serialized copy and clear flag
try:
object.__delattr__(self, cache_attr)
except AttributeError:
pass
# Store serialized form back
self._data[key] = serialized
return serialized
return self._data.__getitem__(key)

def __setitem__(self, key: str, value: typing.Any) -> None:
# Clear any cached deserialized value when setting through dictionary access
cache_attr = f"_deserialized_{key}"
try:
object.__delattr__(self, cache_attr)
except AttributeError:
pass
self._data.__setitem__(key, value)
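These __getitem__/__setitem__ changes work together with the _RestField.__get__/__set__ changes later in the file: the first attribute read deserializes a mutable value, writes it back into _data, and sets a per-key flag so subsequent reads return the same object (and mutations land in _data), while any write clears the flag. A minimal standalone sketch of that deserialize-once-and-write-back pattern, independent of the generated model machinery:

class CachedListField:
    """Descriptor sketch: split a comma-joined string into a list on first
    access, store it back into _data, and hand out that same list afterwards."""

    def __init__(self, name: str):
        self._name = name
        self._flag = f"_deserialized_{name}"

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        value = obj._data.get(self._name)
        if getattr(obj, self._flag, False):
            return value  # already deserialized in place
        if isinstance(value, str):  # wire form -> python form
            value = value.split(",") if value else []
            obj._data[self._name] = value
            setattr(obj, self._flag, True)
        return value

    def __set__(self, obj, value):
        obj.__dict__.pop(self._flag, None)  # a new value invalidates the flag
        obj._data[self._name] = value

class Sketch:
    tags = CachedListField("tags")

    def __init__(self, data: dict):
        self._data = data

m = Sketch({"tags": "a,b"})
m.tags.append("c")  # the mutation goes straight into _data
assert m._data["tags"] == ["a", "b", "c"]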

def __delitem__(self, key: str) -> None:
@@ -482,6 +531,8 @@ def _is_model(obj: typing.Any) -> bool:

def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements
if isinstance(o, list):
if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o):
return _ARRAY_ENCODE_MAPPING[format].join(o)
return [_serialize(x, format) for x in o]
if isinstance(o, dict):
return {k: _serialize(v, format) for k, v in o.items()}
@@ -637,6 +688,10 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
if not rf._rest_name_input:
rf._rest_name_input = attr
cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items())
cls._backcompat_attr_to_rest_field: dict[str, _RestField] = {
Model._get_backcompat_attribute_name(cls._attr_to_rest_field, attr): rf
for attr, rf in cls._attr_to_rest_field.items()
}
cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")

return super().__new__(cls)
@@ -646,6 +701,16 @@ def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None:
if hasattr(base, "__mapping__"):
base.__mapping__[discriminator or cls.__name__] = cls # type: ignore

@classmethod
def _get_backcompat_attribute_name(cls, attr_to_rest_field: dict[str, "_RestField"], attr_name: str) -> str:
rest_field_obj = attr_to_rest_field.get(attr_name) # pylint: disable=protected-access
if rest_field_obj is None:
return attr_name
original_tsp_name = getattr(rest_field_obj, "_original_tsp_name", None) # pylint: disable=protected-access
if original_tsp_name:
return original_tsp_name
return attr_name
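The new _backcompat_attr_to_rest_field map keys each rest field by its original TypeSpec name when one was recorded, falling back to the Python attribute name otherwise. A toy version of the same lookup, using hypothetical field names and a lightweight stand-in for _RestField:

from dataclasses import dataclass
from typing import Optional

@dataclass
class FieldInfo:
    rest_name: str
    original_tsp_name: Optional[str] = None

def backcompat_name(fields: dict, attr: str) -> str:
    # Same fallback order as Model._get_backcompat_attribute_name.
    field = fields.get(attr)
    if field is None:
        return attr
    return field.original_tsp_name or attr

fields = {
    "display_name": FieldInfo("displayName", original_tsp_name="friendlyName"),
    "kind": FieldInfo("kind"),
}
backcompat = {backcompat_name(fields, attr): f for attr, f in fields.items()}
assert set(backcompat) == {"friendlyName", "kind"}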

@classmethod
def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]:
for v in cls.__dict__.values():
@@ -757,6 +822,14 @@ def _deserialize_multiple_sequence(
return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers))


def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool:
return (
isinstance(deserializer, functools.partial)
and isinstance(deserializer.args[0], functools.partial)
and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable
)


def _deserialize_sequence(
deserializer: typing.Optional[typing.Callable],
module: typing.Optional[str],
@@ -766,6 +839,19 @@ def _deserialize_sequence(
return obj
if isinstance(obj, ET.Element):
obj = list(obj)

# encoded string may be deserialized to sequence
if isinstance(obj, str) and isinstance(deserializer, functools.partial):
# for list[str]
if _is_array_encoded_deserializer(deserializer):
return deserializer(obj)

# for list[Union[...]]
if isinstance(deserializer.args[0], list):
for sub_deserializer in deserializer.args[0]:
if _is_array_encoded_deserializer(sub_deserializer):
return sub_deserializer(obj)

return type(obj)(_deserialize(deserializer, entry, module) for entry in obj)
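_deserialize_sequence now also accepts a raw string when the entry deserializer (or one member of a Union entry deserializer) is the array-encoded partial produced by get_deserializer; the detection works by introspecting functools.partial objects through .func and .args. A small illustration of that introspection pattern, using a simplified decoder rather than the real nested partials:

import functools

def split_on(delimiter: str, value: str) -> list:
    return value.split(delimiter) if value else []

pipe_decoder = functools.partial(split_on, "|")  # delimiter bound up front

def is_pipe_decoder(fn) -> bool:
    # Same shape of check as _is_array_encoded_deserializer: look at the
    # wrapped function and the bound arguments of the partial.
    return (
        isinstance(fn, functools.partial)
        and fn.func is split_on
        and fn.args == ("|",)
    )

assert is_pipe_decoder(pipe_decoder)
assert pipe_decoder("a|b") == ["a", "b"]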


@@ -816,16 +902,16 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur

# is it optional?
try:
if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore
if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore
if len(annotation.__args__) <= 2: # pyright: ignore
if_obj_deserializer = _get_deserialize_callable_from_annotation(
next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore
next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore
)

return functools.partial(_deserialize_with_optional, if_obj_deserializer)
# the type is Optional[Union[...]], we need to remove the None type from the Union
annotation_copy = copy.copy(annotation)
annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore
annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore
return _get_deserialize_callable_from_annotation(annotation_copy, module, rf)
except AttributeError:
pass
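The Optional handling above now compares annotation arguments against NoneType by identity instead of equality, which is the idiomatic check and behaves the same for typing's flattened unions. The filtering is easy to see on a plain annotation:

import typing

_NONE_TYPE = type(None)

annotation = typing.Optional[typing.Union[int, str]]  # flattens to Union[int, str, None]
args = typing.get_args(annotation)  # (int, str, NoneType)

assert any(a is _NONE_TYPE for a in args)
assert [a for a in args if a is not _NONE_TYPE] == [int, str]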
@@ -951,7 +1037,7 @@ def _failsafe_deserialize(
) -> typing.Any:
try:
return _deserialize(deserializer, response.json(), module, rf, format)
except DeserializationError:
except Exception: # pylint: disable=broad-except
_LOGGER.warning(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
@@ -964,13 +1050,14 @@ def _failsafe_deserialize_xml(
) -> typing.Any:
try:
return _deserialize_xml(deserializer, response.text())
except DeserializationError:
except Exception: # pylint: disable=broad-except
_LOGGER.warning(
"Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
)
return None


# pylint: disable=too-many-instance-attributes
class _RestField:
def __init__(
self,
@@ -983,6 +1070,7 @@ def __init__(
format: typing.Optional[str] = None,
is_multipart_file_input: bool = False,
xml: typing.Optional[dict[str, typing.Any]] = None,
original_tsp_name: typing.Optional[str] = None,
):
self._type = type
self._rest_name_input = name
@@ -994,10 +1082,15 @@ def __init__(
self._format = format
self._is_multipart_file_input = is_multipart_file_input
self._xml = xml if xml is not None else {}
self._original_tsp_name = original_tsp_name

@property
def _class_type(self) -> typing.Any:
return getattr(self._type, "args", [None])[0]
result = getattr(self._type, "args", [None])[0]
# type may be wrapped by nested functools.partial so we need to check for that
if isinstance(result, functools.partial):
return getattr(result, "args", [None])[0]
return result

@property
def _rest_name(self) -> str:
@@ -1008,14 +1101,37 @@ def _rest_name(self) -> str:
def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin
# by this point, type and rest_name will have a value bc we default
# them in __new__ of the Model class
item = obj.get(self._rest_name)
# Use _data.get() directly to avoid triggering __getitem__ which clears the cache
item = obj._data.get(self._rest_name)
if item is None:
return item
if self._is_model:
return item
return _deserialize(self._type, _serialize(item, self._format), rf=self)

# For mutable types, we want mutations to directly affect _data
# Check if we've already deserialized this value
cache_attr = f"_deserialized_{self._rest_name}"
if hasattr(obj, cache_attr):
# Return the value from _data directly (it's been deserialized in place)
return obj._data.get(self._rest_name)

deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self)

# For mutable types, store the deserialized value back in _data
# so mutations directly affect _data
if isinstance(deserialized, (dict, list, set)):
obj._data[self._rest_name] = deserialized
object.__setattr__(obj, cache_attr, True) # Mark as deserialized
return deserialized

return deserialized

def __set__(self, obj: Model, value) -> None:
# Clear the cached deserialized object when setting a new value
cache_attr = f"_deserialized_{self._rest_name}"
if hasattr(obj, cache_attr):
object.__delattr__(obj, cache_attr)

if value is None:
# we want to wipe out entries if users set attr to None
try:
@@ -1045,6 +1161,7 @@ def rest_field(
format: typing.Optional[str] = None,
is_multipart_file_input: bool = False,
xml: typing.Optional[dict[str, typing.Any]] = None,
original_tsp_name: typing.Optional[str] = None,
) -> typing.Any:
return _RestField(
name=name,
@@ -1054,6 +1171,7 @@
format=format,
is_multipart_file_input=is_multipart_file_input,
xml=xml,
original_tsp_name=original_tsp_name,
)


@@ -1183,7 +1301,7 @@ def _get_wrapped_element(
_get_element(v, exclude_readonly, meta, wrapped_element)
else:
wrapped_element.text = _get_primitive_type_value(v)
return wrapped_element
return wrapped_element # type: ignore[no-any-return]


def _get_primitive_type_value(v) -> str:
@@ -1196,7 +1314,9 @@ def _get_primitive_type_value(v) -> str:
return str(v)


def _create_xml_element(tag, prefix=None, ns=None):
def _create_xml_element(
tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None
) -> ET.Element:
if prefix and ns:
ET.register_namespace(prefix, ns)
if ns: